During jump-scope checking, build an ExprWithCleanups immediately
into the enclosing scope; this is a more accurate model but is
(I believe) unnecessary in my test case due to other flaws.
However, one of those flaws is now intentional: blocks which
appear in return statements can be trivially observed to not
extend in lifetime past the return, and so we can allow a jump
past them. Do the necessary magic in IR-generation to make
this work.

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@164589 91177308-0d34-0410-b5e6-96231b3b80d8
John McCall 2012-09-25 06:56:03 +00:00
Parent e49ff3ef34
Commit 9f357de8d5
4 changed files with 78 additions and 3 deletions
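
In user terms, the new exception accepts code like the following
sketch (adapted from the test17 function added in this commit;
assumes clang -fobjc-arc -fblocks, and the function name is
illustrative). The switch's jump to 'case 0:' passes over the block
literal in 'case 1:'s return operand, which the jump-scope checker
previously rejected:

  // A minimal sketch of the now-accepted pattern. Each block literal
  // appears directly in a return operand, so it provably cannot
  // outlive its return; jumping past it is therefore safe.
  id (^pick(id self, int which))(void) {
    switch (which) {
    case 1: return ^{ return self; };  // jump past this block is now OK
    case 0: return ^{ return self; };
    }
    return (void *)0;
  }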

View File

@@ -743,6 +743,17 @@ void CodeGenFunction::EmitReturnStmt(const ReturnStmt &S) {
   // Emit the result value, even if unused, to evalute the side effects.
   const Expr *RV = S.getRetValue();
 
+  // Treat block literals in a return expression as if they appeared
+  // in their own scope.  This permits a small, easily-implemented
+  // exception to our over-conservative rules about not jumping to
+  // statements following block literals with non-trivial cleanups.
+  RunCleanupsScope cleanupScope(*this);
+  if (const ExprWithCleanups *cleanups =
+        dyn_cast_or_null<ExprWithCleanups>(RV)) {
+    enterFullExpression(cleanups);
+    RV = cleanups->getSubExpr();
+  }
+
   // FIXME: Clean this up by using an LValue for ReturnTemp,
   // EmitStoreThroughLValue, and EmitAnyExpr.
   if (S.getNRVOCandidate() && S.getNRVOCandidate()->isNRVOVariable() &&
@@ -779,6 +790,7 @@ void CodeGenFunction::EmitReturnStmt(const ReturnStmt &S) {
                                           AggValueSlot::IsNotAliased));
   }
 
+  cleanupScope.ForceCleanup();
   EmitBranchThroughCleanup(ReturnBlock);
 }
 
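
The effect is that the return operand's full-expression now runs in
its own cleanups scope: the block is copied (retained) into the
return slot first, and ForceCleanup then releases the on-stack
block's captures before the branch to the function's return block.
A minimal source-level sketch, assuming clang -fobjc-arc -fblocks
(the function name is illustrative; the numbered comments narrate
the emission order the new code produces, as pinned down by the
CHECK lines in the ObjC test below):

  id (^identityBlock(id obj))(void) {
    // 1. Emitting the return value sets up the stack block, which
    //    captures 'obj' with a retain (a non-trivial cleanup).
    // 2. objc_retainBlock copies the block to the heap, and the
    //    result is stored into the return slot.
    // 3. cleanupScope.ForceCleanup() releases the stack block's
    //    capture, and only then does control branch to the return
    //    block via EmitBranchThroughCleanup.
    return ^{ return obj; };
  }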

View File

@@ -453,14 +453,19 @@ void JumpScopeChecker::BuildScopeInformation(Stmt *S, unsigned &origParentScope)
       BuildScopeInformation(AS->getSubStmt(), (newParentScope = Scopes.size()-1));
       continue;
     }
 
-    if (const BlockExpr *BE = dyn_cast<BlockExpr>(SubStmt)) {
-      const BlockDecl *BDecl = BE->getBlockDecl();
+    // Disallow jumps past full-expressions that use blocks with
+    // non-trivial cleanups of their captures.  This is theoretically
+    // implementable but a lot of work which we haven't felt up to doing.
+    if (ExprWithCleanups *EWC = dyn_cast<ExprWithCleanups>(SubStmt)) {
+      for (unsigned i = 0, e = EWC->getNumObjects(); i != e; ++i) {
+        const BlockDecl *BDecl = EWC->getObject(i);
       for (BlockDecl::capture_const_iterator ci = BDecl->capture_begin(),
            ce = BDecl->capture_end(); ci != ce; ++ci) {
         VarDecl *variable = ci->getVariable();
         BuildScopeInformation(variable, BDecl, ParentScope);
       }
+      }
     }
 
     // Recursively walk the AST.
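
For contrast, the conservative rule this code implements still
rejects jumps that skip such a full-expression in any position
other than a return operand. A minimal sketch, assuming
clang -fobjc-arc -fblocks ('use' is a hypothetical function
taking a block):

  extern void use(void (^)(void));

  void f(int cond, id x) {
    if (cond)
      goto done;           // rejected: the jump would enter the region
                           // where the block's strong capture of 'x'
                           // is live, without running its setup
    use(^{ (void)x; });    // block retains 'x'; its cleanup releases it
  done:
    return;
  }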

View File

@@ -365,6 +365,7 @@ namespace test7 {
// CHECK-NEXT: invoke void @_ZN5test71BC1ERKNS_1AEPS0_(
// CHECK: store i1 false, i1* [[OUTER_NEW]]
// CHECK: phi
// CHECK-NEXT: store [[B]]*
// Destroy the inner A object.
// CHECK-NEXT: load i1* [[INNER_A]]

View File

@@ -532,3 +532,60 @@ void test16() {
   // CHECK-NEXT: [[SLOTREL:%.*]] = getelementptr inbounds [[BLOCK_T]]* [[BLOCK]], i32 0, i32 5
   // CHECK-NEXT: store void ()* null, void ()** [[BLKVAR]], align 8
 }
+
+// rdar://12151005
+//
+// This is an intentional exception to our conservative jump-scope
+// checking for full-expressions containing block literals with
+// non-trivial cleanups: if the block literal appears in the operand
+// of a return statement, there's no need to extend its lifetime.
+id (^test17(id self, int which))(void) {
+  switch (which) {
+  case 1: return ^{ return self; };
+  case 0: return ^{ return self; };
+  }
+  return (void*) 0;
+}
+// CHECK: define i8* ()* @test17(
+// CHECK: [[RET:%.*]] = alloca i8* ()*, align
+// CHECK-NEXT: [[SELF:%.*]] = alloca i8*,
+// CHECK: [[B0:%.*]] = alloca [[BLOCK:<.*>]], align
+// CHECK: [[B1:%.*]] = alloca [[BLOCK]], align
+// CHECK: [[T0:%.*]] = call i8* @objc_retain(i8*
+// CHECK-NEXT: store i8* [[T0]], i8** [[SELF]], align
+// CHECK-NOT: objc_retain
+// CHECK-NOT: objc_release
+// CHECK: [[DESTROY:%.*]] = getelementptr inbounds [[BLOCK]]* [[B0]], i32 0, i32 5
+// CHECK-NOT: objc_retain
+// CHECK-NOT: objc_release
+// CHECK: [[T0:%.*]] = getelementptr inbounds [[BLOCK]]* [[B0]], i32 0, i32 5
+// CHECK-NEXT: [[T1:%.*]] = load i8** [[SELF]], align
+// CHECK-NEXT: [[T2:%.*]] = call i8* @objc_retain(i8* [[T1]])
+// CHECK-NEXT: store i8* [[T2]], i8** [[T0]],
+// CHECK-NEXT: [[T0:%.*]] = bitcast [[BLOCK]]* [[B0]] to i8* ()*
+// CHECK-NEXT: [[T1:%.*]] = bitcast i8* ()* [[T0]] to i8*
+// CHECK-NEXT: [[T2:%.*]] = call i8* @objc_retainBlock(i8* [[T1]])
+// CHECK-NEXT: [[T3:%.*]] = bitcast i8* [[T2]] to i8* ()*
+// CHECK-NEXT: store i8* ()* [[T3]], i8* ()** [[RET]]
+// CHECK-NEXT: [[T0:%.*]] = load i8** [[DESTROY]]
+// CHECK-NEXT: call void @objc_release(i8* [[T0]])
+// CHECK-NEXT: store i32
+// CHECK-NEXT: br label
+// CHECK-NOT: objc_retain
+// CHECK-NOT: objc_release
+// CHECK: [[DESTROY:%.*]] = getelementptr inbounds [[BLOCK]]* [[B1]], i32 0, i32 5
+// CHECK-NOT: objc_retain
+// CHECK-NOT: objc_release
+// CHECK: [[T0:%.*]] = getelementptr inbounds [[BLOCK]]* [[B1]], i32 0, i32 5
+// CHECK-NEXT: [[T1:%.*]] = load i8** [[SELF]], align
+// CHECK-NEXT: [[T2:%.*]] = call i8* @objc_retain(i8* [[T1]])
+// CHECK-NEXT: store i8* [[T2]], i8** [[T0]],
+// CHECK-NEXT: [[T0:%.*]] = bitcast [[BLOCK]]* [[B1]] to i8* ()*
+// CHECK-NEXT: [[T1:%.*]] = bitcast i8* ()* [[T0]] to i8*
+// CHECK-NEXT: [[T2:%.*]] = call i8* @objc_retainBlock(i8* [[T1]])
+// CHECK-NEXT: [[T3:%.*]] = bitcast i8* [[T2]] to i8* ()*
+// CHECK-NEXT: store i8* ()* [[T3]], i8* ()** [[RET]]
+// CHECK-NEXT: [[T0:%.*]] = load i8** [[DESTROY]]
+// CHECK-NEXT: call void @objc_release(i8* [[T0]])
+// CHECK-NEXT: store i32
+// CHECK-NEXT: br label
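
As a usage-level footnote (a hypothetical caller, not part of the
commit's test), the behavior these CHECK lines pin down is visible
in ordinary code: the returned block owns its own retain of 'self',
so it stays valid after test17 returns even though the on-stack
block copies were cleaned up before the return branch:

  #import <Foundation/Foundation.h>

  extern id (^test17(id self, int which))(void);

  int main(void) {
    @autoreleasepool {
      id obj = [NSObject new];
      id (^getter)(void) = test17(obj, 1);
      obj = nil;               // drop our reference; the block keeps its own
      NSLog(@"%@", getter());  // still valid: prints the object's description
    }
    return 0;
  }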