Mirror of https://github.com/microsoft/clang-1.git
Revise cleanup IR generation to fix a major bug with cleanups (PR7686)
as well as some significant asymptotic inefficiencies with threading multiple jumps through deep cleanups.

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@109274 91177308-0d34-0410-b5e6-96231b3b80d8
This commit is contained in:
Parent
eb024acef8
Commit
ff8e11579f
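The change is easiest to follow with the mechanism in mind: a jump that has to leave cleanup-protected scopes now stores a small integer destination index into a shared cleanup-destination slot and branches to the innermost cleanup, and each cleanup ends in a single switch on that slot. A minimal sketch of the kind of source this is about, using hypothetical helpers cond1/cond2 and a stand-in type A (none of these appear in the patch):

struct A { ~A() {} };
bool cond1() { return false; }
bool cond2() { return false; }

void deep() {
  for (;;) {
    A a1, a2, a3;            // three nested normal cleanups
    if (cond1()) break;      // jump 1 must run ~a3, ~a2, ~a1 on the way out
    if (cond2()) goto done;  // jump 2 threads through the same cleanups
    return;                  // jump 3 likewise
  }
 done:
  return;
}

Per the commit message, the old threading rewrote cleanup exits once per jump, which degraded on deep cleanup stacks; with destination indices, each jump costs one store plus one redirected branch, and the cleanups' shared exit switches do the routing.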
@@ -98,6 +98,11 @@ void EHScopeStack::popCleanup() {
  InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
  StartOfData += Cleanup.getAllocatedSize();

  if (empty()) NextEHDestIndex = FirstEHDestIndex;

  // Destroy the cleanup.
  Cleanup.~EHCleanupScope();

  // Check whether we can shrink the branch-fixups stack.
  if (!BranchFixups.empty()) {
    // If we no longer have any normal cleanups, all the fixups are

@@ -123,6 +128,8 @@ void EHScopeStack::popFilter() {
  EHFilterScope &Filter = cast<EHFilterScope>(*begin());
  StartOfData += EHFilterScope::getSizeForNumFilters(Filter.getNumFilters());

  if (empty()) NextEHDestIndex = FirstEHDestIndex;

  assert(CatchDepth > 0 && "mismatched filter push/pop");
  CatchDepth--;
}

@@ -130,13 +137,16 @@ void EHScopeStack::popFilter() {
EHCatchScope *EHScopeStack::pushCatch(unsigned NumHandlers) {
  char *Buffer = allocate(EHCatchScope::getSizeForNumHandlers(NumHandlers));
  CatchDepth++;
  return new (Buffer) EHCatchScope(NumHandlers);
  EHCatchScope *Scope = new (Buffer) EHCatchScope(NumHandlers);
  for (unsigned I = 0; I != NumHandlers; ++I)
    Scope->getHandlers()[I].Index = getNextEHDestIndex();
  return Scope;
}

void EHScopeStack::pushTerminate() {
  char *Buffer = allocate(EHTerminateScope::getSize());
  CatchDepth++;
  new (Buffer) EHTerminateScope();
  new (Buffer) EHTerminateScope(getNextEHDestIndex());
}

/// Remove any 'null' fixups on the stack.  However, we can't pop more

@@ -158,20 +168,6 @@ void EHScopeStack::popNullFixups() {
    BranchFixups.pop_back();
}

void EHScopeStack::resolveBranchFixups(llvm::BasicBlock *Dest) {
  assert(Dest && "null block passed to resolveBranchFixups");

  if (BranchFixups.empty()) return;
  assert(hasNormalCleanups() &&
         "branch fixups exist with no normal cleanups on stack");

  for (unsigned I = 0, E = BranchFixups.size(); I != E; ++I)
    if (BranchFixups[I].Destination == Dest)
      BranchFixups[I].Destination = 0;

  popNullFixups();
}

static llvm::Constant *getAllocateExceptionFn(CodeGenFunction &CGF) {
  // void *__cxa_allocate_exception(size_t thrown_size);
  const llvm::Type *SizeTy = CGF.ConvertType(CGF.getContext().getSizeType());

@@ -756,8 +752,8 @@ llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
  EHSelector.push_back(getPersonalityFn(*this, Personality));

  // Accumulate all the handlers in scope.
  llvm::DenseMap<llvm::Value*, JumpDest> EHHandlers;
  JumpDest CatchAll;
  llvm::DenseMap<llvm::Value*, UnwindDest> EHHandlers;
  UnwindDest CatchAll;
  bool HasEHCleanup = false;
  bool HasEHFilter = false;
  llvm::SmallVector<llvm::Value*, 8> EHFilters;
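The hunks in EmitLandingPad swap JumpDest for UnwindDest on all EH edges. Neither definition appears in this excerpt (they live in CodeGenFunction.h); this is a minimal sketch of the interface the diff relies on, with the stable-iterator type stubbed so it compiles standalone:

namespace llvm { class BasicBlock; }

// Stub standing in for EHScopeStack::stable_iterator (an assumption of
// this sketch, not code from the patch).
struct StableIterator {
  unsigned Value = 0;
  bool isValid() const { return Value != 0; }
};

// A branch destination plus the cleanup depth it lives at and the
// switch-case index used to thread the jump through cleanups.
class JumpDest {
  llvm::BasicBlock *Block = nullptr;
  StableIterator ScopeDepth;
  unsigned Index = 0;
public:
  JumpDest() = default;
  JumpDest(llvm::BasicBlock *B, StableIterator Depth, unsigned I)
      : Block(B), ScopeDepth(Depth), Index(I) {}
  bool isValid() const { return Block != nullptr; }
  llvm::BasicBlock *getBlock() const { return Block; }
  StableIterator getScopeDepth() const { return ScopeDepth; }
  unsigned getDestIndex() const { return Index; }
};

// UnwindDest presumably has the same shape; keeping it a distinct type
// separates normal jump destinations from EH unwind destinations.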
@@ -773,7 +769,7 @@ llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
    case EHScope::Filter: {
      assert(I.next() == EHStack.end() && "EH filter is not end of EH stack");
      assert(!CatchAll.Block && "EH filter reached after catch-all");
      assert(!CatchAll.isValid() && "EH filter reached after catch-all");

      // Filter scopes get added to the selector in weird ways.
      EHFilterScope &Filter = cast<EHFilterScope>(*I);

@@ -791,9 +787,10 @@ llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
    case EHScope::Terminate:
      // Terminate scopes are basically catch-alls.
      assert(!CatchAll.Block);
      CatchAll.Block = getTerminateHandler();
      CatchAll.ScopeDepth = EHStack.getEnclosingEHCleanup(I);
      assert(!CatchAll.isValid());
      CatchAll = UnwindDest(getTerminateHandler(),
                            EHStack.getEnclosingEHCleanup(I),
                            cast<EHTerminateScope>(*I).getDestIndex());
      goto done;

    case EHScope::Catch:

@@ -806,30 +803,32 @@ llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
      // Catch-all.  We should only have one of these per catch.
      if (!Handler.Type) {
        assert(!CatchAll.Block);
        CatchAll.Block = Handler.Block;
        CatchAll.ScopeDepth = EHStack.getEnclosingEHCleanup(I);
        assert(!CatchAll.isValid());
        CatchAll = UnwindDest(Handler.Block,
                              EHStack.getEnclosingEHCleanup(I),
                              Handler.Index);
        continue;
      }

      // Check whether we already have a handler for this type.
      JumpDest &Dest = EHHandlers[Handler.Type];
      if (Dest.Block) continue;
      UnwindDest &Dest = EHHandlers[Handler.Type];
      if (Dest.isValid()) continue;

      EHSelector.push_back(Handler.Type);
      Dest.Block = Handler.Block;
      Dest.ScopeDepth = EHStack.getEnclosingEHCleanup(I);
      Dest = UnwindDest(Handler.Block,
                        EHStack.getEnclosingEHCleanup(I),
                        Handler.Index);
    }

    // Stop if we found a catch-all.
    if (CatchAll.Block) break;
    if (CatchAll.isValid()) break;
  }

 done:
  unsigned LastToEmitInLoop = EHSelector.size();

  // If we have a catch-all, add null to the selector.
  if (CatchAll.Block) {
  if (CatchAll.isValid()) {
    EHSelector.push_back(getCatchAllValue(CGF));

    // If we have an EH filter, we need to add those handlers in the

@@ -878,14 +877,15 @@ llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
  // filter (possibly with a cleanup), a catch-all, or another catch).
  for (unsigned I = 2; I != LastToEmitInLoop; ++I) {
    llvm::Value *Type = EHSelector[I];
    JumpDest Dest = EHHandlers[Type];
    assert(Dest.Block && "no handler entry for value in selector?");
    UnwindDest Dest = EHHandlers[Type];
    assert(Dest.isValid() && "no handler entry for value in selector?");

    // Figure out where to branch on a match.  As a debug code-size
    // optimization, if the scope depth matches the innermost cleanup,
    // we branch directly to the catch handler.
    llvm::BasicBlock *Match = Dest.Block;
    bool MatchNeedsCleanup = Dest.ScopeDepth != EHStack.getInnermostEHCleanup();
    llvm::BasicBlock *Match = Dest.getBlock();
    bool MatchNeedsCleanup =
      Dest.getScopeDepth() != EHStack.getInnermostEHCleanup();
    if (MatchNeedsCleanup)
      Match = createBasicBlock("eh.match");

@@ -911,7 +911,7 @@ llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
  // Emit the final case in the selector.
  // This might be a catch-all....
  if (CatchAll.Block) {
  if (CatchAll.isValid()) {
    assert(isa<llvm::ConstantPointerNull>(EHSelector.back()));
    EmitBranchThroughEHCleanup(CatchAll);

@@ -930,7 +930,8 @@ llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
    }

    llvm::BasicBlock *CleanupContBB = createBasicBlock("ehspec.cleanup.cont");
    EmitBranchThroughEHCleanup(JumpDest(CleanupContBB, EHStack.stable_end()));
    EmitBranchThroughEHCleanup(UnwindDest(CleanupContBB, EHStack.stable_end(),
                                          EHStack.getNextEHDestIndex()));
    EmitBlock(CleanupContBB);

  if (HasEHCleanup)

@@ -975,26 +976,7 @@ llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
  // ...or a cleanup.
  } else {
    // We emit a jump to a notional label at the outermost unwind state.
    llvm::BasicBlock *Unwind = createBasicBlock("eh.resume");
    JumpDest Dest(Unwind, EHStack.stable_end());
    EmitBranchThroughEHCleanup(Dest);

    // The unwind block.  We have to reload the exception here because
    // we might have unwound through arbitrary blocks, so the landing
    // pad might not dominate.
    EmitBlock(Unwind);

    // This can always be a call because we necessarily didn't find
    // anything on the EH stack which needs our help.
    llvm::Constant *RethrowFn;
    if (const char *RethrowName = Personality.getCatchallRethrowFnName())
      RethrowFn = getCatchallRethrowFn(CGF, RethrowName);
    else
      RethrowFn = getUnwindResumeOrRethrowFn();
    Builder.CreateCall(RethrowFn, Builder.CreateLoad(getExceptionSlot()))
      ->setDoesNotReturn();
    Builder.CreateUnreachable();
    EmitBranchThroughEHCleanup(getRethrowDest());
  }

  // Restore the old IR generation state.

@@ -1537,6 +1519,35 @@ llvm::BasicBlock *CodeGenFunction::getTerminateHandler() {
  return TerminateHandler;
}

CodeGenFunction::UnwindDest CodeGenFunction::getRethrowDest() {
  if (RethrowBlock.isValid()) return RethrowBlock;

  CGBuilderTy::InsertPoint SavedIP = Builder.saveIP();

  // We emit a jump to a notional label at the outermost unwind state.
  llvm::BasicBlock *Unwind = createBasicBlock("eh.resume");
  Builder.SetInsertPoint(Unwind);

  const EHPersonality &Personality = EHPersonality::get(CGM.getLangOptions());

  // This can always be a call because we necessarily didn't find
  // anything on the EH stack which needs our help.
  llvm::Constant *RethrowFn;
  if (const char *RethrowName = Personality.getCatchallRethrowFnName())
    RethrowFn = getCatchallRethrowFn(*this, RethrowName);
  else
    RethrowFn = getUnwindResumeOrRethrowFn();

  Builder.CreateCall(RethrowFn, Builder.CreateLoad(getExceptionSlot()))
    ->setDoesNotReturn();
  Builder.CreateUnreachable();

  Builder.restoreIP(SavedIP);

  RethrowBlock = UnwindDest(Unwind, EHStack.stable_end(), 0);
  return RethrowBlock;
}

EHScopeStack::Cleanup::~Cleanup() {
  llvm_unreachable("Cleanup is indestructable");
}
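getRethrowDest gives every function a single shared rethrow block. A sketch of the source situation it serves (mayThrow is a hypothetical external function):

struct A { ~A() {} };
void mayThrow();

void p() {
  A a;         // EH cleanup: ~A() must run if mayThrow() unwinds
  mayThrow();  // if this throws and nothing in p() catches it, the landing
               // pad runs a's cleanup and then branches through to the
               // shared "eh.resume" block, which reloads the exception and
               // calls the personality's catch-all rethrow function or,
               // failing that, the function returned by
               // getUnwindResumeOrRethrowFn()
}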
@@ -100,15 +100,13 @@ public:
    /// The catch handler for this type.
    llvm::BasicBlock *Block;

    static Handler make(llvm::Value *Type, llvm::BasicBlock *Block) {
      Handler Temp;
      Temp.Type = Type;
      Temp.Block = Block;
      return Temp;
    }
    /// The unwind destination index for this handler.
    unsigned Index;
  };

private:
  friend class EHScopeStack;

  Handler *getHandlers() {
    return reinterpret_cast<Handler*>(this+1);
  }

@@ -136,7 +134,8 @@ public:
  void setHandler(unsigned I, llvm::Value *Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I] = Handler::make(Type, Block);
    getHandlers()[I].Type = Type;
    getHandlers()[I].Block = Block;
  }

  const Handler &getHandler(unsigned I) const {

@@ -184,6 +183,39 @@ class EHCleanupScope : public EHScope {
  /// created if needed before the cleanup is popped.
  llvm::BasicBlock *EHBlock;

  /// Extra information required for cleanups that have resolved
  /// branches through them.  This has to be allocated on the side
  /// because everything on the cleanup stack has to be trivially
  /// movable.
  struct ExtInfo {
    /// The destinations of normal branch-afters and branch-throughs.
    llvm::SmallPtrSet<llvm::BasicBlock*, 4> Branches;

    /// Normal branch-afters.
    llvm::SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
      BranchAfters;

    /// The destinations of EH branch-afters and branch-throughs.
    /// TODO: optimize for the extremely common case of a single
    /// branch-through.
    llvm::SmallPtrSet<llvm::BasicBlock*, 4> EHBranches;

    /// EH branch-afters.
    llvm::SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
      EHBranchAfters;
  };
  mutable struct ExtInfo *ExtInfo;

  struct ExtInfo &getExtInfo() {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

  const struct ExtInfo &getExtInfo() const {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

public:
  /// Gets the size required for a lazy cleanup scope with the given
  /// cleanup-data requirements.

@@ -203,8 +235,14 @@ public:
      IsNormalCleanup(IsNormal), IsEHCleanup(IsEH),
      CleanupSize(CleanupSize), FixupDepth(FixupDepth),
      EnclosingNormal(EnclosingNormal), EnclosingEH(EnclosingEH),
      NormalBlock(0), EHBlock(0)
    {}
      NormalBlock(0), EHBlock(0), ExtInfo(0)
    {
      assert(this->CleanupSize == CleanupSize && "cleanup size overflow");
    }

  ~EHCleanupScope() {
    delete ExtInfo;
  }

  bool isNormalCleanup() const { return IsNormalCleanup; }
  llvm::BasicBlock *getNormalBlock() const { return NormalBlock; }

@@ -229,6 +267,102 @@ public:
    return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
  }

  /// True if this cleanup scope has any branch-afters or branch-throughs.
  bool hasBranches() const { return ExtInfo && !ExtInfo->Branches.empty(); }

  /// Add a branch-after to this cleanup scope.  A branch-after is a
  /// branch from a point protected by this (normal) cleanup to a
  /// point in the normal cleanup scope immediately containing it.
  /// For example,
  ///   for (;;) { A a; break; }
  /// contains a branch-after.
  ///
  /// Branch-afters each have their own destination out of the
  /// cleanup, guaranteed distinct from anything else threaded through
  /// it.  Therefore branch-afters usually force a switch after the
  /// cleanup.
  void addBranchAfter(llvm::ConstantInt *Index,
                      llvm::BasicBlock *Block) {
    struct ExtInfo &ExtInfo = getExtInfo();
    if (ExtInfo.Branches.insert(Block))
      ExtInfo.BranchAfters.push_back(std::make_pair(Block, Index));
  }

  /// Return the number of unique branch-afters on this scope.
  unsigned getNumBranchAfters() const {
    return ExtInfo ? ExtInfo->BranchAfters.size() : 0;
  }

  llvm::BasicBlock *getBranchAfterBlock(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].first;
  }

  llvm::ConstantInt *getBranchAfterIndex(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].second;
  }

  /// Add a branch-through to this cleanup scope.  A branch-through is
  /// a branch from a scope protected by this (normal) cleanup to an
  /// enclosing scope other than the immediately-enclosing normal
  /// cleanup scope.
  ///
  /// In the following example, the branch through B's scope is a
  /// branch-through, while the branch through A's scope is a
  /// branch-after:
  ///   for (;;) { A a; B b; break; }
  ///
  /// All branch-throughs have a common destination out of the
  /// cleanup, one possibly shared with the fall-through.  Therefore
  /// branch-throughs usually don't force a switch after the cleanup.
  ///
  /// \return true if the branch-through was new to this scope
  bool addBranchThrough(llvm::BasicBlock *Block) {
    return getExtInfo().Branches.insert(Block);
  }

  /// Determines if this cleanup scope has any branch throughs.
  bool hasBranchThroughs() const {
    if (!ExtInfo) return false;
    return (ExtInfo->BranchAfters.size() != ExtInfo->Branches.size());
  }

  // Same stuff, only for EH branches instead of normal branches.
  // It's quite possible that we could find a better representation
  // for this.

  bool hasEHBranches() const { return ExtInfo && !ExtInfo->EHBranches.empty(); }
  void addEHBranchAfter(llvm::ConstantInt *Index,
                        llvm::BasicBlock *Block) {
    struct ExtInfo &ExtInfo = getExtInfo();
    if (ExtInfo.EHBranches.insert(Block))
      ExtInfo.EHBranchAfters.push_back(std::make_pair(Block, Index));
  }

  unsigned getNumEHBranchAfters() const {
    return ExtInfo ? ExtInfo->EHBranchAfters.size() : 0;
  }

  llvm::BasicBlock *getEHBranchAfterBlock(unsigned I) const {
    assert(I < getNumEHBranchAfters());
    return ExtInfo->EHBranchAfters[I].first;
  }

  llvm::ConstantInt *getEHBranchAfterIndex(unsigned I) const {
    assert(I < getNumEHBranchAfters());
    return ExtInfo->EHBranchAfters[I].second;
  }

  bool addEHBranchThrough(llvm::BasicBlock *Block) {
    return getExtInfo().EHBranches.insert(Block);
  }

  bool hasEHBranchThroughs() const {
    if (!ExtInfo) return false;
    return (ExtInfo->EHBranchAfters.size() != ExtInfo->EHBranches.size());
  }

  static bool classof(const EHScope *Scope) {
    return (Scope->getKind() == Cleanup);
  }
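The doc comments above compress the two kinds of threaded branches into "for (;;) { A a; B b; break; }". Spelled out as a compilable example (A and B are stand-ins with nontrivial destructors):

struct A { ~A() {} };
struct B { ~B() {} };

void f() {
  for (;;) {
    A a;    // outer cleanup inside the loop
    B b;    // innermost cleanup: pushed last, runs first
    break;  // must run ~B() and then ~A() before reaching the loop exit
  }
  // Relative to b's cleanup the break is a branch-through: it continues
  // into the enclosing cleanup (a's) rather than stopping in b's
  // immediately-enclosing scope. Relative to a's cleanup it is a
  // branch-after: it exits into the scope immediately containing a's
  // cleanup, so it gets its own case in a's exit switch (or a direct
  // branch if it is the only thing threaded through).
}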
@@ -281,10 +415,13 @@ public:
/// An exceptions scope which calls std::terminate if any exception
/// reaches it.
class EHTerminateScope : public EHScope {
  unsigned DestIndex : BitsRemaining;
public:
  EHTerminateScope() : EHScope(Terminate) {}
  EHTerminateScope(unsigned Index) : EHScope(Terminate), DestIndex(Index) {}
  static size_t getSize() { return sizeof(EHTerminateScope); }

  unsigned getDestIndex() const { return DestIndex; }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Terminate;
  }

@@ -344,6 +481,9 @@ public:
      return copy;
    }

    bool encloses(iterator other) const { return Ptr >= other.Ptr; }
    bool strictlyEncloses(iterator other) const { return Ptr > other.Ptr; }

    bool operator==(iterator other) const { return Ptr == other.Ptr; }
    bool operator!=(iterator other) const { return Ptr != other.Ptr; }
  };

@@ -363,6 +503,8 @@ inline void EHScopeStack::popCatch() {
  StartOfData += EHCatchScope::getSizeForNumHandlers(
                   cast<EHCatchScope>(*begin()).getNumHandlers());

  if (empty()) NextEHDestIndex = FirstEHDestIndex;

  assert(CatchDepth > 0 && "mismatched catch/terminate push/pop");
  CatchDepth--;
}

@@ -373,6 +515,8 @@ inline void EHScopeStack::popTerminate() {
  assert(isa<EHTerminateScope>(*begin()));
  StartOfData += EHTerminateScope::getSize();

  if (empty()) NextEHDestIndex = FirstEHDestIndex;

  assert(CatchDepth > 0 && "mismatched catch/terminate push/pop");
  CatchDepth--;
}
@@ -793,7 +793,7 @@ void CodeGenFunction::EmitObjCForCollectionStmt(const ObjCForCollectionStmt &S){
  BreakContinueStack.pop_back();

  EmitBlock(AfterBody.Block);
  EmitBlock(AfterBody.getBlock());

  llvm::BasicBlock *FetchMore = createBasicBlock("fetchmore");

@@ -829,7 +829,7 @@ void CodeGenFunction::EmitObjCForCollectionStmt(const ObjCForCollectionStmt &S){
                    LV.getAddress());
  }

  EmitBlock(LoopEnd.Block);
  EmitBlock(LoopEnd.getBlock());
}

void CodeGenFunction::EmitObjCAtTryStmt(const ObjCAtTryStmt &S) {
@@ -2016,13 +2016,11 @@ void CGObjCGNU::EmitTryStmt(CodeGen::CodeGenFunction &CGF,
  if (S.getFinallyStmt())
    CGF.ExitFinallyBlock(FinallyInfo);

  if (Cont.Block) {
    if (Cont.Block->use_empty())
      delete Cont.Block;
    else {
      CGF.EmitBranch(Cont.Block);
      CGF.EmitBlock(Cont.Block);
    }
  if (Cont.isValid()) {
    if (Cont.getBlock()->use_empty())
      delete Cont.getBlock();
    else
      CGF.EmitBlock(Cont.getBlock());
  }
}
@@ -2953,11 +2953,11 @@ void CGObjCMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
  // Pop the cleanup.
  CGF.PopCleanupBlock();
  CGF.EmitBlock(FinallyEnd.Block);
  CGF.EmitBlock(FinallyEnd.getBlock());

  // Emit the rethrow block.
  CGF.Builder.ClearInsertionPoint();
  CGF.EmitBlock(FinallyRethrow.Block, true);
  CGF.EmitBlock(FinallyRethrow.getBlock(), true);
  if (CGF.HaveInsertPoint()) {
    CGF.Builder.CreateCall(ObjCTypes.getExceptionThrowFn(),
                           CGF.Builder.CreateLoad(RethrowPtr))

@@ -2965,7 +2965,7 @@ void CGObjCMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
    CGF.Builder.CreateUnreachable();
  }

  CGF.Builder.SetInsertPoint(FinallyEnd.Block);
  CGF.Builder.SetInsertPoint(FinallyEnd.getBlock());
}

void CGObjCMac::EmitThrowStmt(CodeGen::CodeGenFunction &CGF,

@@ -5895,8 +5895,8 @@ void CGObjCNonFragileABIMac::EmitTryStmt(CodeGen::CodeGenFunction &CGF,
  if (S.getFinallyStmt())
    CGF.ExitFinallyBlock(FinallyInfo);

  if (Cont.Block)
    CGF.EmitBlock(Cont.Block);
  if (Cont.isValid())
    CGF.EmitBlock(Cont.getBlock());
}

/// EmitThrowStmt - Generate code for a throw statement.
@@ -248,32 +248,35 @@ void CodeGenFunction::EmitBranch(llvm::BasicBlock *Target) {
CodeGenFunction::JumpDest
CodeGenFunction::getJumpDestForLabel(const LabelStmt *S) {
  JumpDest &Dest = LabelMap[S];
  if (Dest.Block) return Dest;
  if (Dest.isValid()) return Dest;

  // Create, but don't insert, the new block.
  Dest.Block = createBasicBlock(S->getName());
  Dest.ScopeDepth = EHScopeStack::stable_iterator::invalid();
  Dest = JumpDest(createBasicBlock(S->getName()),
                  EHScopeStack::stable_iterator::invalid(),
                  NextCleanupDestIndex++);
  return Dest;
}

void CodeGenFunction::EmitLabel(const LabelStmt &S) {
  JumpDest &Dest = LabelMap[&S];

  // If we didn't needed a forward reference to this label, just go
  // If we didn't need a forward reference to this label, just go
  // ahead and create a destination at the current scope.
  if (!Dest.Block) {
  if (!Dest.isValid()) {
    Dest = getJumpDestInCurrentScope(S.getName());

  // Otherwise, we need to give this label a target depth and remove
  // it from the branch-fixups list.
  } else {
    assert(!Dest.ScopeDepth.isValid() && "already emitted label!");
    Dest.ScopeDepth = EHStack.stable_begin();
    assert(!Dest.getScopeDepth().isValid() && "already emitted label!");
    Dest = JumpDest(Dest.getBlock(),
                    EHStack.stable_begin(),
                    Dest.getDestIndex());

    EHStack.resolveBranchFixups(Dest.Block);
    ResolveBranchFixups(Dest.getBlock());
  }

  EmitBlock(Dest.Block);
  EmitBlock(Dest.getBlock());
}
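getJumpDestForLabel now assigns a destination index (NextCleanupDestIndex++) even while the label's scope depth is still unknown. The unknown-depth case comes from forward gotos; a sketch (Guard is a stand-in with a destructor):

struct Guard { ~Guard() {} };

void g(bool skip) {
  {
    Guard lock;
    if (skip) goto done;  // forward reference: 'done' has no scope depth
                          // yet, so the branch becomes a branch fixup that
                          // carries the label's pre-assigned index
  }                       // popping lock's cleanup threads the fixup
 done:                    // EmitLabel then pins the label to the current
  return;                 // scope depth and ResolveBranchFixups retires
                          // any fixups aimed at it
}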
@@ -373,7 +376,7 @@ void CodeGenFunction::EmitWhileStmt(const WhileStmt &S) {
  // Emit the header for the loop, which will also become
  // the continue target.
  JumpDest LoopHeader = getJumpDestInCurrentScope("while.cond");
  EmitBlock(LoopHeader.Block);
  EmitBlock(LoopHeader.getBlock());

  // Create an exit block for when the condition fails, which will
  // also become the break target.

@@ -409,13 +412,13 @@ void CodeGenFunction::EmitWhileStmt(const WhileStmt &S) {
  // As long as the condition is true, go to the loop body.
  llvm::BasicBlock *LoopBody = createBasicBlock("while.body");
  if (EmitBoolCondBranch) {
    llvm::BasicBlock *ExitBlock = LoopExit.Block;
    llvm::BasicBlock *ExitBlock = LoopExit.getBlock();
    if (ConditionScope.requiresCleanups())
      ExitBlock = createBasicBlock("while.exit");

    Builder.CreateCondBr(BoolCondVal, LoopBody, ExitBlock);

    if (ExitBlock != LoopExit.Block) {
    if (ExitBlock != LoopExit.getBlock()) {
      EmitBlock(ExitBlock);
      EmitBranchThroughCleanup(LoopExit);
    }

@@ -435,15 +438,15 @@ void CodeGenFunction::EmitWhileStmt(const WhileStmt &S) {
    ConditionScope.ForceCleanup();

  // Branch to the loop header again.
  EmitBranch(LoopHeader.Block);
  EmitBranch(LoopHeader.getBlock());

  // Emit the exit block.
  EmitBlock(LoopExit.Block, true);
  EmitBlock(LoopExit.getBlock(), true);

  // The LoopHeader typically is just a branch if we skipped emitting
  // a branch, try to erase it.
  if (!EmitBoolCondBranch)
    SimplifyForwardingBlocks(LoopHeader.Block);
    SimplifyForwardingBlocks(LoopHeader.getBlock());
}

void CodeGenFunction::EmitDoStmt(const DoStmt &S) {

@@ -463,7 +466,7 @@ void CodeGenFunction::EmitDoStmt(const DoStmt &S) {
  BreakContinueStack.pop_back();

  EmitBlock(LoopCond.Block);
  EmitBlock(LoopCond.getBlock());

  // C99 6.8.5.2: "The evaluation of the controlling expression takes place
  // after each execution of the loop body."

@@ -482,15 +485,15 @@ void CodeGenFunction::EmitDoStmt(const DoStmt &S) {
  // As long as the condition is true, iterate the loop.
  if (EmitBoolCondBranch)
    Builder.CreateCondBr(BoolCondVal, LoopBody, LoopExit.Block);
    Builder.CreateCondBr(BoolCondVal, LoopBody, LoopExit.getBlock());

  // Emit the exit block.
  EmitBlock(LoopExit.Block);
  EmitBlock(LoopExit.getBlock());

  // The DoCond block typically is just a branch if we skipped
  // emitting a branch, try to erase it.
  if (!EmitBoolCondBranch)
    SimplifyForwardingBlocks(LoopCond.Block);
    SimplifyForwardingBlocks(LoopCond.getBlock());
}

void CodeGenFunction::EmitForStmt(const ForStmt &S) {

@@ -506,7 +509,7 @@ void CodeGenFunction::EmitForStmt(const ForStmt &S) {
  // If there's an increment, the continue scope will be overwritten
  // later.
  JumpDest Continue = getJumpDestInCurrentScope("for.cond");
  llvm::BasicBlock *CondBlock = Continue.Block;
  llvm::BasicBlock *CondBlock = Continue.getBlock();
  EmitBlock(CondBlock);

  // Create a cleanup scope for the condition variable cleanups.

@@ -516,7 +519,7 @@ void CodeGenFunction::EmitForStmt(const ForStmt &S) {
    if (S.getCond()) {
      // If the for statement has a condition scope, emit the local variable
      // declaration.
      llvm::BasicBlock *ExitBlock = LoopExit.Block;
      llvm::BasicBlock *ExitBlock = LoopExit.getBlock();
      if (S.getConditionVariable()) {
        EmitLocalBlockVarDecl(*S.getConditionVariable());
      }

@@ -534,7 +537,7 @@ void CodeGenFunction::EmitForStmt(const ForStmt &S) {
      BoolCondVal = EvaluateExprAsBool(S.getCond());
      Builder.CreateCondBr(BoolCondVal, ForBody, ExitBlock);

      if (ExitBlock != LoopExit.Block) {
      if (ExitBlock != LoopExit.getBlock()) {
        EmitBlock(ExitBlock);
        EmitBranchThroughCleanup(LoopExit);
      }

@@ -570,7 +573,7 @@ void CodeGenFunction::EmitForStmt(const ForStmt &S) {
  // If there is an increment, emit it next.
  if (S.getInc()) {
    EmitBlock(Continue.Block);
    EmitBlock(Continue.getBlock());
    EmitStmt(S.getInc());
  }

@@ -587,7 +590,7 @@ void CodeGenFunction::EmitForStmt(const ForStmt &S) {
  }

  // Emit the fall-through block.
  EmitBlock(LoopExit.Block, true);
  EmitBlock(LoopExit.getBlock(), true);
}

void CodeGenFunction::EmitReturnOfRValue(RValue RV, QualType Ty) {

@@ -840,13 +843,15 @@ void CodeGenFunction::EmitSwitchStmt(const SwitchStmt &S) {
    // Otherwise, just forward the default block to the switch end.
    } else {
      DefaultBlock->replaceAllUsesWith(SwitchExit.Block);
      DefaultBlock->replaceAllUsesWith(SwitchExit.getBlock());
      delete DefaultBlock;
    }
  }

  ConditionScope.ForceCleanup();

  // Emit continuation.
  EmitBlock(SwitchExit.Block, true);
  EmitBlock(SwitchExit.getBlock(), true);

  SwitchInsn = SavedSwitchInsn;
  CaseRangeBlock = SavedCRBlock;
@@ -31,8 +31,9 @@ CodeGenFunction::CodeGenFunction(CodeGenModule &cgm)
  : BlockFunction(cgm, *this, Builder), CGM(cgm),
    Target(CGM.getContext().Target),
    Builder(cgm.getModule().getContext()),
    NormalCleanupDest(0), EHCleanupDest(0), NextCleanupDestIndex(1),
    ExceptionSlot(0), DebugInfo(0), IndirectBranch(0),
    SwitchInsn(0), CaseRangeBlock(0), InvokeDest(0),
    SwitchInsn(0), CaseRangeBlock(0),
    DidCallStackSave(false), UnreachableBlock(0),
    CXXThisDecl(0), CXXThisValue(0), CXXVTTDecl(0), CXXVTTValue(0),
    ConditionalBranchLevel(0), TerminateLandingPad(0), TerminateHandler(0),

@@ -89,26 +90,26 @@ void CodeGenFunction::EmitReturnBlock() {
    // We have a valid insert point, reuse it if it is empty or there are no
    // explicit jumps to the return block.
    if (CurBB->empty() || ReturnBlock.Block->use_empty()) {
      ReturnBlock.Block->replaceAllUsesWith(CurBB);
      delete ReturnBlock.Block;
    if (CurBB->empty() || ReturnBlock.getBlock()->use_empty()) {
      ReturnBlock.getBlock()->replaceAllUsesWith(CurBB);
      delete ReturnBlock.getBlock();
    } else
      EmitBlock(ReturnBlock.Block);
      EmitBlock(ReturnBlock.getBlock());
    return;
  }

  // Otherwise, if the return block is the target of a single direct
  // branch then we can just put the code in that block instead. This
  // cleans up functions which started with a unified return block.
  if (ReturnBlock.Block->hasOneUse()) {
  if (ReturnBlock.getBlock()->hasOneUse()) {
    llvm::BranchInst *BI =
      dyn_cast<llvm::BranchInst>(*ReturnBlock.Block->use_begin());
      dyn_cast<llvm::BranchInst>(*ReturnBlock.getBlock()->use_begin());
    if (BI && BI->isUnconditional() &&
        BI->getSuccessor(0) == ReturnBlock.Block) {
        BI->getSuccessor(0) == ReturnBlock.getBlock()) {
      // Reset insertion point and delete the branch.
      Builder.SetInsertPoint(BI->getParent());
      BI->eraseFromParent();
      delete ReturnBlock.Block;
      delete ReturnBlock.getBlock();
      return;
    }
  }

@@ -117,7 +118,7 @@ void CodeGenFunction::EmitReturnBlock() {
  // unless it has uses. However, we still need a place to put the debug
  // region.end for now.

  EmitBlock(ReturnBlock.Block);
  EmitBlock(ReturnBlock.getBlock());
}

static void EmitIfUsed(CodeGenFunction &CGF, llvm::BasicBlock *BB) {

@@ -170,6 +171,7 @@ void CodeGenFunction::FinishFunction(SourceLocation EndLoc) {
    }
  }

  EmitIfUsed(*this, RethrowBlock.getBlock());
  EmitIfUsed(*this, TerminateLandingPad);
  EmitIfUsed(*this, TerminateHandler);
  EmitIfUsed(*this, UnreachableBlock);

@@ -585,7 +587,7 @@ llvm::BlockAddress *CodeGenFunction::GetAddrOfLabel(const LabelStmt *L) {
  if (IndirectBranch == 0)
    GetIndirectGotoBlock();

  llvm::BasicBlock *BB = getJumpDestForLabel(L).Block;
  llvm::BasicBlock *BB = getJumpDestForLabel(L).getBlock();

  // Make sure the indirect branch includes all of the address-taken blocks.
  IndirectBranch->addDestination(BB);

@@ -666,41 +668,75 @@ llvm::Value* CodeGenFunction::EmitVAListRef(const Expr* E) {
void CodeGenFunction::PopCleanupBlocks(EHScopeStack::stable_iterator Old) {
  assert(Old.isValid());

  EHScopeStack::iterator E = EHStack.find(Old);
  while (EHStack.begin() != E)
    PopCleanupBlock();
  while (EHStack.stable_begin() != Old) {
    EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());

    // As long as Old strictly encloses the scope's enclosing normal
    // cleanup, we're going to emit another normal cleanup which
    // fallthrough can propagate through.
    bool FallThroughIsBranchThrough =
      Old.strictlyEncloses(Scope.getEnclosingNormalCleanup());

    PopCleanupBlock(FallThroughIsBranchThrough);
  }
}

/// Creates a switch instruction to thread branches out of the given
/// block (which is the exit block of a cleanup).
static void CreateCleanupSwitch(CodeGenFunction &CGF,
                                llvm::BasicBlock *Block) {
  if (Block->getTerminator()) {
    assert(isa<llvm::SwitchInst>(Block->getTerminator()) &&
           "cleanup block already has a terminator, but it isn't a switch");
    return;
static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF,
                                           EHCleanupScope &Scope) {
  assert(Scope.isNormalCleanup());
  llvm::BasicBlock *Entry = Scope.getNormalBlock();
  if (!Entry) {
    Entry = CGF.createBasicBlock("cleanup");
    Scope.setNormalBlock(Entry);
  }
  return Entry;
}

  llvm::Value *DestCodePtr
    = CGF.CreateTempAlloca(CGF.Builder.getInt32Ty(), "cleanup.dst");
  CGBuilderTy Builder(Block);
  llvm::Value *DestCode = Builder.CreateLoad(DestCodePtr, "tmp");
static llvm::BasicBlock *CreateEHEntry(CodeGenFunction &CGF,
                                       EHCleanupScope &Scope) {
  assert(Scope.isEHCleanup());
  llvm::BasicBlock *Entry = Scope.getEHBlock();
  if (!Entry) {
    Entry = CGF.createBasicBlock("eh.cleanup");
    Scope.setEHBlock(Entry);
  }
  return Entry;
}

  // Create a switch instruction to determine where to jump next.
  Builder.CreateSwitch(DestCode, CGF.getUnreachableBlock());
/// Transitions the terminator of the given exit-block of a cleanup to
/// be a cleanup switch.
static llvm::SwitchInst *TransitionToCleanupSwitch(CodeGenFunction &CGF,
                                                   llvm::BasicBlock *Block) {
  // If it's a branch, turn it into a switch whose default
  // destination is its original target.
  llvm::TerminatorInst *Term = Block->getTerminator();
  assert(Term && "can't transition block without terminator");

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional());
    llvm::LoadInst *Load =
      new llvm::LoadInst(CGF.getNormalCleanupDestSlot(), "cleanup.dest", Term);
    llvm::SwitchInst *Switch =
      llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
    Br->eraseFromParent();
    return Switch;
  } else {
    return cast<llvm::SwitchInst>(Term);
  }
}

/// Attempts to reduce a cleanup's entry block to a fallthrough.  This
/// is basically llvm::MergeBlockIntoPredecessor, except
/// simplified/optimized for the tighter constraints on cleanup
/// blocks.
static void SimplifyCleanupEntry(CodeGenFunction &CGF,
                                 llvm::BasicBlock *Entry) {
/// simplified/optimized for the tighter constraints on cleanup blocks.
///
/// Returns the new block, whatever it is.
static llvm::BasicBlock *SimplifyCleanupEntry(CodeGenFunction &CGF,
                                              llvm::BasicBlock *Entry) {
  llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
  if (!Pred) return;
  if (!Pred) return Entry;

  llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
  if (!Br || Br->isConditional()) return;
  if (!Br || Br->isConditional()) return Entry;
  assert(Br->getSuccessor(0) == Entry);

  // If we were previously inserting at the end of the cleanup entry
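TransitionToCleanupSwitch upgrades a cleanup exit that ends in an unconditional branch into a switch on the stored destination index, keeping the old target as the switch default. A source-level sketch of what creates two distinct destinations through one cleanup (c1/c2 are hypothetical; whether the exit starts as a plain branch and is upgraded, or is built as a switch up front in PopCleanupBlock, depends on the order the jumps are seen):

struct A { ~A() {} };
bool c1() { return false; }
bool c2() { return true; }

void h() {
  for (;;) {
    A a;
    if (c1()) break;   // one destination threaded through a's cleanup:
                       // an unconditional branch out of the cleanup suffices
    if (c2()) return;  // a second, distinct destination: the exit must
                       // instead load the cleanup-dest slot and switch on it
  }
}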
@@ -720,80 +756,8 @@ static void SimplifyCleanupEntry(CodeGenFunction &CGF,
  if (WasInsertBlock)
    CGF.Builder.SetInsertPoint(Pred);
}

/// Attempts to reduce an cleanup's exit switch to an unconditional
/// branch.
static void SimplifyCleanupExit(llvm::BasicBlock *Exit) {
  llvm::TerminatorInst *Terminator = Exit->getTerminator();
  assert(Terminator && "completed cleanup exit has no terminator");

  llvm::SwitchInst *Switch = dyn_cast<llvm::SwitchInst>(Terminator);
  if (!Switch) return;
  if (Switch->getNumCases() != 2) return; // default + 1

  llvm::LoadInst *Cond = cast<llvm::LoadInst>(Switch->getCondition());
  llvm::AllocaInst *CondVar = cast<llvm::AllocaInst>(Cond->getPointerOperand());

  // Replace the switch instruction with an unconditional branch.
  llvm::BasicBlock *Dest = Switch->getSuccessor(1); // default is 0
  Switch->eraseFromParent();
  llvm::BranchInst::Create(Dest, Exit);

  // Delete all uses of the condition variable.
  Cond->eraseFromParent();
  while (!CondVar->use_empty())
    cast<llvm::StoreInst>(*CondVar->use_begin())->eraseFromParent();

  // Delete the condition variable itself.
  CondVar->eraseFromParent();
}

/// Threads a branch fixup through a cleanup block.
static void ThreadFixupThroughCleanup(CodeGenFunction &CGF,
                                      BranchFixup &Fixup,
                                      llvm::BasicBlock *Entry,
                                      llvm::BasicBlock *Exit) {
  if (!Exit->getTerminator())
    CreateCleanupSwitch(CGF, Exit);

  // Find the switch and its destination index alloca.
  llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Exit->getTerminator());
  llvm::Value *DestCodePtr =
    cast<llvm::LoadInst>(Switch->getCondition())->getPointerOperand();

  // Compute the index of the new case we're adding to the switch.
  unsigned Index = Switch->getNumCases();

  const llvm::IntegerType *i32 = llvm::Type::getInt32Ty(CGF.getLLVMContext());
  llvm::ConstantInt *IndexV = llvm::ConstantInt::get(i32, Index);

  // Set the index in the origin block.
  new llvm::StoreInst(IndexV, DestCodePtr, Fixup.Origin);

  // Add a case to the switch.
  Switch->addCase(IndexV, Fixup.Destination);

  // Change the last branch to point to the cleanup entry block.
  Fixup.LatestBranch->setSuccessor(Fixup.LatestBranchIndex, Entry);

  // And finally, update the fixup.
  Fixup.LatestBranch = Switch;
  Fixup.LatestBranchIndex = Index;
}

/// Try to simplify both the entry and exit edges of a cleanup.
static void SimplifyCleanupEdges(CodeGenFunction &CGF,
                                 llvm::BasicBlock *Entry,
                                 llvm::BasicBlock *Exit) {

  // Given their current implementations, it's important to run these
  // in this order: SimplifyCleanupEntry will delete Entry if it can
  // be merged into its predecessor, which will then break
  // SimplifyCleanupExit if (as is common) Entry == Exit.

  SimplifyCleanupExit(Exit);
  SimplifyCleanupEntry(CGF, Entry);
  return Pred;
}

static void EmitCleanup(CodeGenFunction &CGF,

@@ -805,42 +769,10 @@ static void EmitCleanup(CodeGenFunction &CGF,
  assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");
}

static void SplitAndEmitCleanup(CodeGenFunction &CGF,
                                EHScopeStack::Cleanup *Fn,
                                bool ForEH,
                                llvm::BasicBlock *Entry) {
  assert(Entry && "no entry block for cleanup");

  // Remove the switch and load from the end of the entry block.
  llvm::Instruction *Switch = &Entry->getInstList().back();
  Entry->getInstList().remove(Switch);
  assert(isa<llvm::SwitchInst>(Switch));
  llvm::Instruction *Load = &Entry->getInstList().back();
  Entry->getInstList().remove(Load);
  assert(isa<llvm::LoadInst>(Load));

  assert(Entry->getInstList().empty() &&
         "lazy cleanup block not empty after removing load/switch pair?");

  // Emit the actual cleanup at the end of the entry block.
  CGF.Builder.SetInsertPoint(Entry);
  EmitCleanup(CGF, Fn, ForEH);

  // Put the load and switch at the end of the exit block.
  llvm::BasicBlock *Exit = CGF.Builder.GetInsertBlock();
  Exit->getInstList().push_back(Load);
  Exit->getInstList().push_back(Switch);

  // Clean up the edges if possible.
  SimplifyCleanupEdges(CGF, Entry, Exit);

  CGF.Builder.ClearInsertionPoint();
}

/// Pops a cleanup block.  If the block includes a normal cleanup, the
/// current insertion point is threaded through the cleanup, as are
/// any branch fixups on the cleanup.
void CodeGenFunction::PopCleanupBlock() {
void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
  assert(!EHStack.empty() && "cleanup stack is empty!");
  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());

@@ -848,8 +780,7 @@ void CodeGenFunction::PopCleanupBlock() {
  // Check whether we need an EH cleanup.  This is only true if we've
  // generated a lazy EH cleanup block.
  llvm::BasicBlock *EHEntry = Scope.getEHBlock();
  bool RequiresEHCleanup = (EHEntry != 0);
  bool RequiresEHCleanup = Scope.hasEHBranches();

  // Check the three conditions which might require a normal cleanup:

@@ -857,9 +788,8 @@ void CodeGenFunction::PopCleanupBlock() {
  unsigned FixupDepth = Scope.getFixupDepth();
  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;

  // - whether control has already been threaded through this cleanup
  llvm::BasicBlock *NormalEntry = Scope.getNormalBlock();
  bool HasExistingBranches = (NormalEntry != 0);
  // - whether there are branch-throughs or branch-afters
  bool HasExistingBranches = Scope.hasBranches();

  // - whether there's a fallthrough
  llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();

@@ -873,7 +803,7 @@ void CodeGenFunction::PopCleanupBlock() {
  // If we don't need the cleanup at all, we're done.
  if (!RequiresNormalCleanup && !RequiresEHCleanup) {
    EHStack.popCleanup(); // safe because there are no fixups
    assert(EHStack.getNumBranchFixups() == 0 ||
           EHStack.hasNormalCleanups());
    return;
@@ -890,157 +820,440 @@ void CodeGenFunction::PopCleanupBlock() {
  EHScopeStack::Cleanup *Fn =
    reinterpret_cast<EHScopeStack::Cleanup*>(CleanupBuffer.data());

  // We're done with the scope; pop it off so we can emit the cleanups.
  EHStack.popCleanup();
  // We want to emit the EH cleanup after the normal cleanup, but go
  // ahead and do the setup for the EH cleanup while the scope is still
  // alive.
  llvm::BasicBlock *EHEntry = 0;
  llvm::SmallVector<llvm::Instruction*, 2> EHInstsToAppend;
  if (RequiresEHCleanup) {
    EHEntry = CreateEHEntry(*this, Scope);

    // Figure out the branch-through dest if necessary.
    llvm::BasicBlock *EHBranchThroughDest = 0;
    if (Scope.hasEHBranchThroughs()) {
      assert(Scope.getEnclosingEHCleanup() != EHStack.stable_end());
      EHScope &S = *EHStack.find(Scope.getEnclosingEHCleanup());
      EHBranchThroughDest = CreateEHEntry(*this, cast<EHCleanupScope>(S));
    }

    // If we have exactly one branch-after and no branch-throughs, we
    // can dispatch it without a switch.
    if (!Scope.hasBranchThroughs() &&
        Scope.getNumEHBranchAfters() == 1) {
      assert(!EHBranchThroughDest);

      // TODO: remove the spurious eh.cleanup.dest stores if this edge
      // never went through any switches.
      llvm::BasicBlock *BranchAfterDest = Scope.getEHBranchAfterBlock(0);
      EHInstsToAppend.push_back(llvm::BranchInst::Create(BranchAfterDest));

    // Otherwise, if we have any branch-afters, we need a switch.
    } else if (Scope.getNumEHBranchAfters()) {
      // The default of the switch belongs to the branch-throughs if
      // they exist.
      llvm::BasicBlock *Default =
        (EHBranchThroughDest ? EHBranchThroughDest : getUnreachableBlock());

      const unsigned SwitchCapacity = Scope.getNumEHBranchAfters();

      llvm::LoadInst *Load =
        new llvm::LoadInst(getEHCleanupDestSlot(), "cleanup.dest");
      llvm::SwitchInst *Switch =
        llvm::SwitchInst::Create(Load, Default, SwitchCapacity);

      EHInstsToAppend.push_back(Load);
      EHInstsToAppend.push_back(Switch);

      for (unsigned I = 0, E = Scope.getNumEHBranchAfters(); I != E; ++I)
        Switch->addCase(Scope.getEHBranchAfterIndex(I),
                        Scope.getEHBranchAfterBlock(I));

    // Otherwise, we have only branch-throughs; jump to the next EH
    // cleanup.
    } else {
      assert(EHBranchThroughDest);
      EHInstsToAppend.push_back(llvm::BranchInst::Create(EHBranchThroughDest));
    }
  }

  if (!RequiresNormalCleanup) {
    EHStack.popCleanup();
  } else {
    // As a kind of crazy internal case, branch-through fall-throughs
    // leave the insertion point set to the end of the last cleanup.
    bool HasPrebranchedFallthrough =
      (HasFallthrough && FallthroughSource->getTerminator());
    assert(!HasPrebranchedFallthrough ||
           FallthroughSource->getTerminator()->getSuccessor(0)
             == Scope.getNormalBlock());

  if (RequiresNormalCleanup) {
    // If we have a fallthrough and no other need for the cleanup,
    // emit it directly.
    if (HasFallthrough && !HasFixups && !HasExistingBranches) {
    if (HasFallthrough && !HasPrebranchedFallthrough &&
        !HasFixups && !HasExistingBranches) {

      // Fixups can cause us to optimistically create a normal block,
      // only to later have no real uses for it.  Just delete it in
      // this case.
      // TODO: we can potentially simplify all the uses after this.
      if (Scope.getNormalBlock()) {
        Scope.getNormalBlock()->replaceAllUsesWith(getUnreachableBlock());
        delete Scope.getNormalBlock();
      }

      EHStack.popCleanup();

      EmitCleanup(*this, Fn, /*ForEH*/ false);

    // Otherwise, the best approach is to thread everything through
    // the cleanup block and then try to clean up after ourselves.
    } else {
      // Force the entry block to exist.
      if (!HasExistingBranches) {
        NormalEntry = createBasicBlock("cleanup");
        CreateCleanupSwitch(*this, NormalEntry);
      }
      llvm::BasicBlock *NormalEntry = CreateNormalEntry(*this, Scope);

      // If there's a fallthrough, we need to store the cleanup
      // destination index.  For fall-throughs this is always zero.
      if (HasFallthrough && !HasPrebranchedFallthrough)
        Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());

      // Emit the entry block.  This implicitly branches to it if we
      // have fallthrough.  All the fixups and existing branches should
      // already be branched to it.
      EmitBlock(NormalEntry);

      // Thread the fallthrough edge through the (momentarily trivial)
      // cleanup.
      llvm::BasicBlock *FallthroughDestination = 0;
      if (HasFallthrough) {
        assert(isa<llvm::BranchInst>(FallthroughSource->getTerminator()));
        FallthroughDestination = createBasicBlock("cleanup.cont");
      bool HasEnclosingCleanups =
        (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());

        BranchFixup Fix;
        Fix.Destination = FallthroughDestination;
        Fix.LatestBranch = FallthroughSource->getTerminator();
        Fix.LatestBranchIndex = 0;
        Fix.Origin = Fix.LatestBranch;

        // Restore fixup invariant.  EmitBlock added a branch to the
        // cleanup which we need to redirect to the destination.
        cast<llvm::BranchInst>(Fix.LatestBranch)
          ->setSuccessor(0, Fix.Destination);

        ThreadFixupThroughCleanup(*this, Fix, NormalEntry, NormalEntry);
      // Compute the branch-through dest if we need it:
      //   - if there are branch-throughs threaded through the scope
      //   - if fall-through is a branch-through
      //   - if there are fixups that will be optimistically forwarded
      //     to the enclosing cleanup
      llvm::BasicBlock *BranchThroughDest = 0;
      if (Scope.hasBranchThroughs() ||
          (HasFallthrough && FallthroughIsBranchThrough) ||
          (HasFixups && HasEnclosingCleanups)) {
        assert(HasEnclosingCleanups);
        EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
        BranchThroughDest = CreateNormalEntry(*this, cast<EHCleanupScope>(S));
      }

      // Thread any "real" fixups we need to thread.
      llvm::BasicBlock *FallthroughDest = 0;
      llvm::SmallVector<llvm::Instruction*, 2> InstsToAppend;

      // If there's exactly one branch-after and no other threads,
      // we can route it without a switch.
      if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
          Scope.getNumBranchAfters() == 1) {
        assert(!BranchThroughDest);

        // TODO: clean up the possibly dead stores to the cleanup dest slot.
        llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));

      // Build a switch-out if we need it:
      //   - if there are branch-afters threaded through the scope
      //   - if fall-through is a branch-after
      //   - if there are fixups that have nowhere left to go and
      //     so must be immediately resolved
      } else if (Scope.getNumBranchAfters() ||
                 (HasFallthrough && !FallthroughIsBranchThrough) ||
                 (HasFixups && !HasEnclosingCleanups)) {

        llvm::BasicBlock *Default =
          (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());

        // TODO: base this on the number of branch-afters and fixups
        const unsigned SwitchCapacity = 10;

        llvm::LoadInst *Load =
          new llvm::LoadInst(getNormalCleanupDestSlot(), "cleanup.dest");
        llvm::SwitchInst *Switch =
          llvm::SwitchInst::Create(Load, Default, SwitchCapacity);

        InstsToAppend.push_back(Load);
        InstsToAppend.push_back(Switch);

        // Branch-after fallthrough.
        if (HasFallthrough && !FallthroughIsBranchThrough) {
          FallthroughDest = createBasicBlock("cleanup.cont");
          Switch->addCase(Builder.getInt32(0), FallthroughDest);
        }

        for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
          Switch->addCase(Scope.getBranchAfterIndex(I),
                          Scope.getBranchAfterBlock(I));
        }

        if (HasFixups && !HasEnclosingCleanups)
          ResolveAllBranchFixups(Switch);
      } else {
        // We should always have a branch-through destination in this case.
        assert(BranchThroughDest);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));
      }

      // We're finally ready to pop the cleanup.
      EHStack.popCleanup();
      assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);

      EmitCleanup(*this, Fn, /*ForEH*/ false);

      // Append the prepared cleanup prologue from above.
      llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
      for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
        NormalExit->getInstList().push_back(InstsToAppend[I]);

      // Optimistically hope that any fixups will continue falling through.
      for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
           I != E; ++I)
        if (CGF.EHStack.getBranchFixup(I).Destination)
          ThreadFixupThroughCleanup(*this, EHStack.getBranchFixup(I),
                                    NormalEntry, NormalEntry);
           I < E; ++I) {
        BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I);
        if (!Fixup.Destination) continue;
        if (!Fixup.OptimisticBranchBlock) {
          new llvm::StoreInst(Builder.getInt32(Fixup.DestinationIndex),
                              getNormalCleanupDestSlot(),
                              Fixup.InitialBranch);
          Fixup.InitialBranch->setSuccessor(0, NormalEntry);
        }
        Fixup.OptimisticBranchBlock = NormalExit;
      }

      if (FallthroughDest)
        EmitBlock(FallthroughDest);
      else if (!HasFallthrough)
        Builder.ClearInsertionPoint();

      SplitAndEmitCleanup(*this, Fn, /*ForEH*/ false, NormalEntry);
      // Check whether we can merge NormalEntry into a single predecessor.
      // This might invalidate (non-IR) pointers to NormalEntry.
      llvm::BasicBlock *NewNormalEntry =
        SimplifyCleanupEntry(*this, NormalEntry);

      if (HasFallthrough)
        EmitBlock(FallthroughDestination);
      // If it did invalidate those pointers, and NormalEntry was the same
      // as NormalExit, go back and patch up the fixups.
      if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
        for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
             I < E; ++I)
          CGF.EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
    }
  }

  assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);
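The fixup loop above works as follows: the first time an unresolved fixup meets a cleanup, its destination index is stored at its InitialBranch and that branch is redirected into the cleanup; afterwards, OptimisticBranchBlock records the cleanup's exit so enclosing cleanups can extend the thread. A sketch of source that exercises it:

struct A { ~A() {} };

void m(bool b) {
  {
    A a1;
    if (b) goto out;  // the destination's scope depth is unknown here, so
                      // this is recorded as a branch fixup, not a resolved
                      // branch-after
  }                   // popping a1's cleanup: the index is stored at the
                      // original branch, the branch is pointed at a1's
                      // cleanup entry, and OptimisticBranchBlock becomes
                      // a1's exit
 out:                 // emitting 'out' resolves and retires the fixup
  return;
}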
|
||||
|
||||
// Emit the EH cleanup if required.
|
||||
if (RequiresEHCleanup) {
|
||||
CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
|
||||
|
||||
EmitBlock(EHEntry);
|
||||
SplitAndEmitCleanup(*this, Fn, /*ForEH*/ true, EHEntry);
|
||||
EmitCleanup(*this, Fn, /*ForEH*/ true);
|
||||
|
||||
// Append the prepared cleanup prologue from above.
|
||||
llvm::BasicBlock *EHExit = Builder.GetInsertBlock();
|
||||
for (unsigned I = 0, E = EHInstsToAppend.size(); I != E; ++I)
|
||||
EHExit->getInstList().push_back(EHInstsToAppend[I]);
|
||||
|
||||
Builder.restoreIP(SavedIP);
|
||||
|
||||
SimplifyCleanupEntry(*this, EHEntry);
|
||||
}
|
||||
}

/// Terminate the current block by emitting a branch which might leave
/// the current cleanup-protected scope. The target scope may not yet
/// be known, in which case this will require a fixup.
///
/// As a side-effect, this method clears the insertion point.
void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
  if (!HaveInsertPoint())
    return;

  // Create the branch.
  llvm::BranchInst *BI = Builder.CreateBr(Dest.Block);
  llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());

  // If we're not in a cleanup scope, we don't need to worry about
  // If we're not in a cleanup scope, or if the destination scope is
  // the current normal-cleanup scope, we don't need to worry about
  // fixups.
  if (!EHStack.hasNormalCleanups()) {
  if (!EHStack.hasNormalCleanups() ||
      Dest.getScopeDepth() == EHStack.getInnermostNormalCleanup()) {
    Builder.ClearInsertionPoint();
    return;
  }

  // Initialize a fixup.
  BranchFixup Fixup;
  Fixup.Destination = Dest.Block;
  Fixup.Origin = BI;
  Fixup.LatestBranch = BI;
  Fixup.LatestBranchIndex = 0;

  // If we can't resolve the destination cleanup scope, just add this
  // to the current cleanup scope.
  if (!Dest.ScopeDepth.isValid()) {
    EHStack.addBranchFixup() = Fixup;
  // to the current cleanup scope as a branch fixup.
  if (!Dest.getScopeDepth().isValid()) {
    BranchFixup &Fixup = EHStack.addBranchFixup();
    Fixup.Destination = Dest.getBlock();
    Fixup.DestinationIndex = Dest.getDestIndex();
    Fixup.InitialBranch = BI;
    Fixup.OptimisticBranchBlock = 0;

    Builder.ClearInsertionPoint();
    return;
  }

  for (EHScopeStack::iterator I = EHStack.begin(),
         E = EHStack.find(Dest.ScopeDepth); I != E; ++I) {
    if (isa<EHCleanupScope>(*I)) {
      EHCleanupScope &Scope = cast<EHCleanupScope>(*I);
      if (Scope.isNormalCleanup()) {
        llvm::BasicBlock *Block = Scope.getNormalBlock();
        if (!Block) {
          Block = createBasicBlock("cleanup");
          Scope.setNormalBlock(Block);
        }
        ThreadFixupThroughCleanup(*this, Fixup, Block, Block);
  // Otherwise, thread through all the normal cleanups in scope.

  // Store the index at the start.
  llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
  new llvm::StoreInst(Index, getNormalCleanupDestSlot(), BI);

  // Adjust BI to point to the first cleanup block.
  {
    EHCleanupScope &Scope =
      cast<EHCleanupScope>(*EHStack.find(EHStack.getInnermostNormalCleanup()));
    BI->setSuccessor(0, CreateNormalEntry(*this, Scope));
  }

  // Add this destination to all the scopes involved.
  EHScopeStack::stable_iterator I = EHStack.getInnermostNormalCleanup();
  EHScopeStack::stable_iterator E = Dest.getScopeDepth();
  if (E.strictlyEncloses(I)) {
    while (true) {
      EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
      assert(Scope.isNormalCleanup());
      I = Scope.getEnclosingNormalCleanup();

      // If this is the last cleanup we're propagating through, tell it
      // that there's a resolved jump moving through it.
      if (!E.strictlyEncloses(I)) {
        Scope.addBranchAfter(Index, Dest.getBlock());
        break;
      }

      // Otherwise, tell the scope that there's a jump propagating
      // through it. If this isn't new information, all the rest of
      // the work has been done before.
      if (!Scope.addBranchThrough(Dest.getBlock()))
        break;
    }
  }

  Builder.ClearInsertionPoint();
}
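
To make the index threading concrete, here is a hedged sketch of source that takes this path (the names are illustrative, not from the patch):

struct A { ~A(); };

void demo(bool b) {
  while (true) {
    A a;
    if (b)
      break;  // EmitBranchThroughCleanup(LoopExit) emits:
              //   1. a store of LoopExit's index to cleanup.dest.slot
              //   2. a branch to the cleanup entry that destroys 'a'
              // the cleanup's exit switch then dispatches on the slot
  }
}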

void CodeGenFunction::EmitBranchThroughEHCleanup(JumpDest Dest) {
void CodeGenFunction::EmitBranchThroughEHCleanup(UnwindDest Dest) {
  // We should never get invalid scope depths for an UnwindDest; that
  // implies that the destination wasn't set up correctly.
  assert(Dest.getScopeDepth().isValid() && "invalid scope depth on EH dest?");

  if (!HaveInsertPoint())
    return;

  // Create the branch.
  llvm::BranchInst *BI = Builder.CreateBr(Dest.Block);
  llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());

  // If we're not in a cleanup scope, we don't need to worry about
  // fixups.
  if (!EHStack.hasEHCleanups()) {
  // If the destination is in the same EH cleanup scope as us, we
  // don't need to thread through anything.
  if (Dest.getScopeDepth() == EHStack.getInnermostEHCleanup()) {
    Builder.ClearInsertionPoint();
    return;
  }
  assert(EHStack.hasEHCleanups());

  // Initialize a fixup.
  BranchFixup Fixup;
  Fixup.Destination = Dest.Block;
  Fixup.Origin = BI;
  Fixup.LatestBranch = BI;
  Fixup.LatestBranchIndex = 0;
  // Store the index at the start.
  llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
  new llvm::StoreInst(Index, getEHCleanupDestSlot(), BI);

  // We should never get invalid scope depths for these: invalid scope
  // depths only arise for as-yet-unemitted labels, and we can't do an
  // EH-unwind to one of those.
  assert(Dest.ScopeDepth.isValid() && "invalid scope depth on EH dest?");
  // Adjust BI to point to the first cleanup block.
  {
    EHCleanupScope &Scope =
      cast<EHCleanupScope>(*EHStack.find(EHStack.getInnermostEHCleanup()));
    BI->setSuccessor(0, CreateEHEntry(*this, Scope));
  }

  // Add this destination to all the scopes involved.
  for (EHScopeStack::stable_iterator
         I = EHStack.getInnermostEHCleanup(),
         E = Dest.getScopeDepth(); ; ) {
    assert(E.strictlyEncloses(I));
    EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
    assert(Scope.isEHCleanup());
    I = Scope.getEnclosingEHCleanup();

  for (EHScopeStack::iterator I = EHStack.begin(),
         E = EHStack.find(Dest.ScopeDepth); I != E; ++I) {
    if (isa<EHCleanupScope>(*I)) {
      EHCleanupScope &Scope = cast<EHCleanupScope>(*I);
      if (Scope.isEHCleanup()) {
        llvm::BasicBlock *Block = Scope.getEHBlock();
        if (!Block) {
          Block = createBasicBlock("eh.cleanup");
          Scope.setEHBlock(Block);
        }
        ThreadFixupThroughCleanup(*this, Fixup, Block, Block);
      }
    // If this is the last cleanup we're propagating through, add this
    // as a branch-after.
    if (I == E) {
      Scope.addEHBranchAfter(Index, Dest.getBlock());
      break;
    }

    // Otherwise, add it as a branch-through. If this isn't new
    // information, all the rest of the work has been done before.
    if (!Scope.addEHBranchThrough(Dest.getBlock()))
      break;
  }

  Builder.ClearInsertionPoint();
}
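
As a usage sketch (hedged; the concrete call sites live in the exception-handling emitters), the typical consumer pairs this with the unified rethrow destination declared later in this patch:

// Once the current handler is done with the exception, continue
// unwinding to the unified rethrow block, running any intervening
// EH cleanups along the way.
EmitBranchThroughEHCleanup(getRethrowDest());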

/// All the branch fixups on the EH stack have propagated out past the
/// outermost normal cleanup; resolve them all by adding cases to the
/// given switch instruction.
void CodeGenFunction::ResolveAllBranchFixups(llvm::SwitchInst *Switch) {
  llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;

  for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination isn't set or if we've
    // already treated it.
    BranchFixup &Fixup = EHStack.getBranchFixup(I);
    if (Fixup.Destination == 0) continue;
    if (!CasesAdded.insert(Fixup.Destination)) continue;

    Switch->addCase(Builder.getInt32(Fixup.DestinationIndex),
                    Fixup.Destination);
  }

  EHStack.clearFixups();
}

void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) {
  assert(Block && "resolving a null target block");
  if (!EHStack.getNumBranchFixups()) return;

  assert(EHStack.hasNormalCleanups() &&
         "branch fixups exist with no normal cleanups on stack");

  llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
  bool ResolvedAny = false;

  for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination doesn't match.
    BranchFixup &Fixup = EHStack.getBranchFixup(I);
    if (Fixup.Destination != Block) continue;

    Fixup.Destination = 0;
    ResolvedAny = true;

    // If it doesn't have an optimistic branch block, the initial
    // branch is already pointing to the right place.
    llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock;
    if (!BranchBB)
      continue;

    // Don't process the same optimistic branch block twice.
    if (!ModifiedOptimisticBlocks.insert(BranchBB))
      continue;

    llvm::SwitchInst *Switch = TransitionToCleanupSwitch(*this, BranchBB);

    // Add a case to the switch.
    Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block);
  }

  if (ResolvedAny)
    EHStack.popNullFixups();
}
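
Branch fixups cover exactly one situation: a forward goto whose label has not been emitted yet, so the destination's scope depth is still invalid. A minimal sketch (illustrative source, not from the patch):

struct A { ~A(); };

void demo(bool b) {
  {
    A a;
    if (b)
      goto done;  // 'done' is unemitted: Dest.getScopeDepth() is invalid,
  }               // so a BranchFixup records the optimistic branch
done:             // emitting the label resolves its fixups via
  return;         // ResolveBranchFixups, patching branches or switch cases
}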

llvm::Value *CodeGenFunction::getNormalCleanupDestSlot() {
  if (!NormalCleanupDest)
    NormalCleanupDest =
      CreateTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
  return NormalCleanupDest;
}

llvm::Value *CodeGenFunction::getEHCleanupDestSlot() {
  if (!EHCleanupDest)
    EHCleanupDest =
      CreateTempAlloca(Builder.getInt32Ty(), "eh.cleanup.dest.slot");
  return EHCleanupDest;
}

@ -77,22 +77,22 @@ namespace CodeGen {
/// the innermost cleanup. When a (normal) cleanup is popped, any
/// unresolved fixups in that scope are threaded through the cleanup.
struct BranchFixup {
  /// The origin of the branch. Any switch-index stores required by
  /// cleanup threading are added before this instruction.
  llvm::Instruction *Origin;
  /// The block containing the terminator which needs to be modified
  /// into a switch if this fixup is resolved into the current scope.
  /// If null, the initial branch points directly to the destination.
  llvm::BasicBlock *OptimisticBranchBlock;

  /// The destination of the branch.
  /// The ultimate destination of the branch.
  ///
  /// This can be set to null to indicate that this fixup was
  /// successfully resolved.
  llvm::BasicBlock *Destination;

  /// The last branch of the fixup. It is an invariant that
  /// LatestBranch->getSuccessor(LatestBranchIndex) == Destination.
  ///
  /// The branch is always either a BranchInst or a SwitchInst.
  llvm::TerminatorInst *LatestBranch;
  unsigned LatestBranchIndex;
  /// The destination index value.
  unsigned DestinationIndex;

  /// The initial branch of the fixup.
  llvm::BranchInst *InitialBranch;
};
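
A hedged restatement of the invariant these fields maintain, as a hypothetical checker (not part of the patch):

static void checkBranchFixupInvariant(const BranchFixup &Fixup) {
  // Until threading rewrites it, the optimistic branch still jumps
  // straight to the final destination; once it has been threaded, the
  // block whose terminator may become a switch is recorded instead.
  if (Fixup.Destination && !Fixup.OptimisticBranchBlock)
    assert(Fixup.InitialBranch->getSuccessor(0) == Fixup.Destination);
}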

enum CleanupKind { NormalAndEHCleanup, EHCleanup, NormalCleanup };

@ -117,9 +117,8 @@ public:
    bool isValid() const { return Size >= 0; }

    /// \return true if this scope is (non-strictly) nested within the
    /// given scope, assuming they're both valid
    bool isWithin(stable_iterator I) const { return Size <= I.Size; }
    bool encloses(stable_iterator I) const { return Size <= I.Size; }
    bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; }

    friend bool operator==(stable_iterator A, stable_iterator B) {
      return A.Size == B.Size;

@ -182,6 +181,11 @@ private:
  /// The number of catches on the stack.
  unsigned CatchDepth;

  /// The current EH destination index. Reset to FirstEHDestIndex
  /// whenever the last EH cleanup is popped.
  unsigned NextEHDestIndex;
  enum { FirstEHDestIndex = 1 };

  /// The current set of branch fixups. A branch fixup is a jump to
  /// an as-yet unemitted label, i.e. a label for which we don't yet
  /// know the EH stack depth. Whenever we pop a cleanup, we have

@ -203,15 +207,13 @@ private:
  char *allocate(size_t Size);

  void popNullFixups();

  void *pushCleanup(CleanupKind K, size_t DataSize);

public:
  EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0),
                   InnermostNormalCleanup(stable_end()),
                   InnermostEHCleanup(stable_end()),
                   CatchDepth(0) {}
                   CatchDepth(0), NextEHDestIndex(FirstEHDestIndex) {}
  ~EHScopeStack() { delete[] StartOfBuffer; }

  // Variadic templates would make this not terrible.

@ -366,8 +368,17 @@ public:
    return BranchFixups[I];
  }

  /// Mark any branch fixups leading to the given block as resolved.
  void resolveBranchFixups(llvm::BasicBlock *Dest);
  /// Pops lazily-removed fixups from the end of the list. This
  /// should only be called by procedures which have just popped a
  /// cleanup or resolved one or more fixups.
  void popNullFixups();

  /// Clears the branch-fixups list. This should only be called by
  /// CodeGenFunction::ResolveAllBranchFixups.
  void clearFixups() { BranchFixups.clear(); }

  /// Gets the next EH destination index.
  unsigned getNextEHDestIndex() { return NextEHDestIndex++; }
};

/// CodeGenFunction - This class organizes the per-function state that is used

@ -376,16 +387,44 @@ class CodeGenFunction : public BlockFunction {
  CodeGenFunction(const CodeGenFunction&); // DO NOT IMPLEMENT
  void operator=(const CodeGenFunction&);  // DO NOT IMPLEMENT
public:
  /// A jump destination is a pair of a basic block and a cleanup
  /// depth. They are used to implement direct jumps across cleanup
  /// scopes, e.g. goto, break, continue, and return.
  /// A jump destination is an abstract label, branching to which may
  /// require a jump out through normal cleanups.
  struct JumpDest {
    JumpDest() : Block(0), ScopeDepth() {}
    JumpDest(llvm::BasicBlock *Block, EHScopeStack::stable_iterator Depth)
      : Block(Block), ScopeDepth(Depth) {}
    JumpDest() : Block(0), ScopeDepth(), Index(0) {}
    JumpDest(llvm::BasicBlock *Block,
             EHScopeStack::stable_iterator Depth,
             unsigned Index)
      : Block(Block), ScopeDepth(Depth), Index(Index) {}

    bool isValid() const { return Block != 0; }
    llvm::BasicBlock *getBlock() const { return Block; }
    EHScopeStack::stable_iterator getScopeDepth() const { return ScopeDepth; }
    unsigned getDestIndex() const { return Index; }

  private:
    llvm::BasicBlock *Block;
    EHScopeStack::stable_iterator ScopeDepth;
    unsigned Index;
  };

  /// An unwind destination is an abstract label, branching to which
  /// may require a jump out through EH cleanups.
  struct UnwindDest {
    UnwindDest() : Block(0), ScopeDepth(), Index(0) {}
    UnwindDest(llvm::BasicBlock *Block,
               EHScopeStack::stable_iterator Depth,
               unsigned Index)
      : Block(Block), ScopeDepth(Depth), Index(Index) {}

    bool isValid() const { return Block != 0; }
    llvm::BasicBlock *getBlock() const { return Block; }
    EHScopeStack::stable_iterator getScopeDepth() const { return ScopeDepth; }
    unsigned getDestIndex() const { return Index; }

  private:
    llvm::BasicBlock *Block;
    EHScopeStack::stable_iterator ScopeDepth;
    unsigned Index;
  };

  CodeGenModule &CGM; // Per-module state.

@ -413,6 +452,9 @@ public:
  /// iff the function has no return value.
  llvm::Value *ReturnValue;

  /// RethrowBlock - Unified rethrow block.
  UnwindDest RethrowBlock;

  /// AllocaInsertPoint - This is an instruction in the entry block before which
  /// we prefer to insert allocas.
  llvm::AssertingVH<llvm::Instruction> AllocaInsertPt;

@ -430,6 +472,12 @@ public:
  EHScopeStack EHStack;

  /// i32s containing the indexes of the cleanup destinations.
  llvm::AllocaInst *NormalCleanupDest;
  llvm::AllocaInst *EHCleanupDest;

  unsigned NextCleanupDestIndex;

  /// The exception slot. All landing pads write the current
  /// exception pointer into this alloca.
  llvm::Value *ExceptionSlot;

@ -469,7 +517,7 @@ public:
  /// PopCleanupBlock - Will pop the cleanup entry on the stack and
  /// process all branch fixups.
  void PopCleanupBlock();
  void PopCleanupBlock(bool FallThroughIsBranchThrough = false);

  /// \brief Enters a new scope for capturing cleanups, all of which
  /// will be executed once the scope is exited.

@ -520,18 +568,21 @@ public:
  /// the cleanup blocks that have been added.
  void PopCleanupBlocks(EHScopeStack::stable_iterator OldCleanupStackSize);

  void ResolveAllBranchFixups(llvm::SwitchInst *Switch);
  void ResolveBranchFixups(llvm::BasicBlock *Target);

  /// The given basic block lies in the current EH scope, but may be a
  /// target of a potentially scope-crossing jump; get a stable handle
  /// to which we can perform this jump later.
  JumpDest getJumpDestInCurrentScope(llvm::BasicBlock *Target) const {
    return JumpDest(Target, EHStack.stable_begin());
  JumpDest getJumpDestInCurrentScope(llvm::BasicBlock *Target) {
    return JumpDest(Target, EHStack.stable_begin(), NextCleanupDestIndex++);
  }

  /// The given basic block lies in the current EH scope, but may be a
  /// target of a potentially scope-crossing jump; get a stable handle
  /// to which we can perform this jump later.
  JumpDest getJumpDestInCurrentScope(const char *Name = 0) {
    return JumpDest(createBasicBlock(Name), EHStack.stable_begin());
    return getJumpDestInCurrentScope(createBasicBlock(Name));
  }
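
A usage sketch (hedged; this mirrors how the statement emitters typically use these helpers):

// Name an abstract loop-exit label in the current scope; each JumpDest
// receives a fresh switch index from NextCleanupDestIndex.
JumpDest LoopExit = getJumpDestInCurrentScope("for.end");

// A 'break' anywhere inside, however many cleanups deep, is then just:
EmitBranchThroughCleanup(LoopExit);

// When the loop is finished, emit the destination block itself:
EmitBlock(LoopExit.getBlock());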

  /// EmitBranchThroughCleanup - Emit a branch from the current insert

@ -542,7 +593,11 @@ public:
  /// EmitBranchThroughEHCleanup - Emit a branch from the current
  /// insert block through the EH cleanup handling code (if any) and
  /// then on to \arg Dest.
  void EmitBranchThroughEHCleanup(JumpDest Dest);
  void EmitBranchThroughEHCleanup(UnwindDest Dest);

  /// getRethrowDest - Returns the unified outermost-scope rethrow
  /// destination.
  UnwindDest getRethrowDest();

  /// BeginConditionalBranch - Should be called before a conditional part of an
  /// expression is emitted. For example, before the RHS of the expression below

@ -600,10 +655,6 @@ private:
  /// statement range in current switch instruction.
  llvm::BasicBlock *CaseRangeBlock;

  /// InvokeDest - This is the nearest exception target for calls
  /// which can unwind, when exceptions are being used.
  llvm::BasicBlock *InvokeDest;

  // VLASizeMap - This keeps track of the associated size for each VLA type.
  // We track this by the size expression rather than the type itself because
  // in certain situations, like a const qualifier applied to a VLA typedef,

@ -660,6 +711,9 @@ public:
  /// is assigned in every landing pad.
  llvm::Value *getExceptionSlot();

  llvm::Value *getNormalCleanupDestSlot();
  llvm::Value *getEHCleanupDestSlot();

  llvm::BasicBlock *getUnreachableBlock() {
    if (!UnreachableBlock) {
      UnreachableBlock = createBasicBlock("unreachable");

@ -105,12 +105,12 @@ void while_destruct(int z) {
  // CHECK-NEXT: br i1 [[COND]]

  // Loop-exit staging block.
  // CHECK: store i32 1, i32* [[CLEANUPDEST]]
  // CHECK: store i32 3, i32* [[CLEANUPDEST]]
  // CHECK-NEXT: br

  // While body.
  // CHECK: store i32 21, i32* [[Z]]
  // CHECK: store i32 2, i32* [[CLEANUPDEST]]
  // CHECK: store i32 0, i32* [[CLEANUPDEST]]
  // CHECK-NEXT: br
  z = 21;

@ -138,7 +138,7 @@ void while_destruct(int z) {
// CHECK: define void @_Z12for_destructi(
void for_destruct(int z) {
  // CHECK: [[Z:%.*]] = alloca i32
  // CHECK: [[XDEST:%.*]] = alloca i32
  // CHECK: [[CLEANUPDEST:%.*]] = alloca i32
  // CHECK: [[I:%.*]] = alloca i32
  // CHECK: call void @_ZN1YC1Ev
  // CHECK-NEXT: br

@ -152,7 +152,7 @@ void for_destruct(int z) {
  // -> %for.body, %for.cond.cleanup

  // %for.cond.cleanup: Exit cleanup staging.
  // CHECK: store i32 1, i32* [[XDEST]]
  // CHECK: store i32 2, i32* [[CLEANUPDEST]]
  // CHECK-NEXT: br
  // -> %cleanup

@ -166,21 +166,21 @@ void for_destruct(int z) {
  // CHECK: [[TMP:%.*]] = load i32* [[Z]]
  // CHECK-NEXT: [[INC:%.*]] = add nsw i32 [[TMP]], 1
  // CHECK-NEXT: store i32 [[INC]], i32* [[Z]]
  // CHECK-NEXT: store i32 2, i32* [[XDEST]]
  // CHECK-NEXT: store i32 0, i32* [[CLEANUPDEST]]
  // CHECK-NEXT: br
  // -> %cleanup

  // %cleanup: Destroys X.
  // CHECK: call void @_ZN1XD1Ev
  // CHECK-NEXT: [[YDESTTMP:%.*]] = load i32* [[XDEST]]
  // CHECK-NEXT: [[YDESTTMP:%.*]] = load i32* [[CLEANUPDEST]]
  // CHECK-NEXT: switch i32 [[YDESTTMP]]
  // 1 -> %cleanup4, 2 -> %cleanup.cont
  // 0 -> %cleanup.cont, default -> %cleanup1

  // %cleanup.cont: (eliminable)
  // CHECK: br
  // -> %for.cond

  // %cleanup4: Destroys Y.
  // %cleanup1: Destroys Y.
  // CHECK: call void @_ZN1YD1Ev(
  // CHECK-NEXT: br
  // -> %for.end

@ -39,6 +39,7 @@ void test2() {
  // CHECK: [[FREEVAR:%.*]] = alloca i1
  // CHECK-NEXT: [[EXNOBJVAR:%.*]] = alloca i8*
  // CHECK-NEXT: [[EXNSLOTVAR:%.*]] = alloca i8*
  // CHECK-NEXT: [[CLEANUPDESTVAR:%.*]] = alloca i32
  // CHECK-NEXT: store i1 false, i1* [[FREEVAR]]
  // CHECK-NEXT: [[EXNOBJ:%.*]] = call i8* @__cxa_allocate_exception(i64 16)
  // CHECK-NEXT: store i8* [[EXNOBJ]], i8** [[EXNOBJVAR]]

@ -124,6 +125,7 @@ namespace test7 {
  // CHECK-NEXT: [[EXNALLOCVAR:%.*]] = alloca i8*
  // CHECK-NEXT: [[CAUGHTEXNVAR:%.*]] = alloca i8*
  // CHECK-NEXT: [[INTCATCHVAR:%.*]] = alloca i32
  // CHECK-NEXT: [[EHCLEANUPDESTVAR:%.*]] = alloca i32
  // CHECK-NEXT: store i1 false, i1* [[FREEEXNOBJ]]
    try {
      try {

@ -153,6 +155,7 @@ namespace test7 {
  // CHECK: [[CAUGHTEXN:%.*]] = call i8* @llvm.eh.exception()
  // CHECK-NEXT: store i8* [[CAUGHTEXN]], i8** [[CAUGHTEXNVAR]]
  // CHECK-NEXT: call i32 (i8*, i8*, ...)* @llvm.eh.selector(i8* [[CAUGHTEXN]], i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*), i8* null)
  // CHECK-NEXT: store i32 1, i32* [[EHCLEANUPDESTVAR]]
  // CHECK-NEXT: call void @__cxa_end_catch()
  // CHECK-NEXT: br label
  // CHECK: load i8** [[CAUGHTEXNVAR]]

@ -290,3 +293,71 @@ namespace test11 {
    }
  }
}

// PR7686
namespace test12 {
  struct A { ~A(); };
  bool opaque(const A&);

  // CHECK: define void @_ZN6test124testEv()
  void test() {
    // CHECK: [[X:%.*]] = alloca [[A:%.*]],
    // CHECK: [[EHCLEANUPDEST:%.*]] = alloca i32
    // CHECK: [[Y:%.*]] = alloca [[A]]
    // CHECK: [[Z:%.*]] = alloca [[A]]
    // CHECK: [[CLEANUPDEST:%.*]] = alloca i32

    A x;
    // CHECK: invoke zeroext i1 @_ZN6test126opaqueERKNS_1AE(
    if (opaque(x)) {
      A y;
      A z;

      // CHECK: invoke void @_ZN6test121AD1Ev([[A]]* [[Z]])
      // CHECK: invoke void @_ZN6test121AD1Ev([[A]]* [[Y]])

      // It'd be great if something eliminated this switch.
      // CHECK: load i32* [[CLEANUPDEST]]
      // CHECK-NEXT: switch i32
      goto success;
    }

  success:
    bool _ = true;

    // CHECK: call void @_ZN6test121AD1Ev([[A]]* [[X]])
    // CHECK-NEXT: ret void
  }
}

// Reduced from some TableGen code that was causing a self-host crash.
namespace test13 {
  struct A { ~A(); };

  void test0(int x) {
    try {
      switch (x) {
      case 0:
        break;
      case 1: {
        A a;
        break;
      }
      default:
        return;
      }
      return;
    } catch (int x) {
    }
    return;
  }

  void test1(int x) {
    A y;
    try {
      switch (x) {
      default: break;
      }
    } catch (int x) {}
  }
}

@ -10,16 +10,20 @@ void test0() {
  @try {
    // CHECK: invoke void @opaque()
    opaque();

    // CHECK: call void @log(i32 1)

  } @catch (C *c) {
    // CHECK: call i8* @llvm.eh.exception()
    // CHECK: call i32 (i8*, i8*, ...)* @llvm.eh.selector({{.*}} @__gnu_objc_personality_v0
    // CHECK: br i1
    // CHECK: call void @objc_exception_throw

    // CHECK: call void @log(i32 0)

    // CHECK: call void @objc_exception_throw

    log(0);
  }

  // CHECK: call void @log(i32 1)
  log(1);
}