diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp
index 1d4066580bb0..cc2255527aa4 100644
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -888,8 +888,26 @@ MarkThreadDataConservatively(JSTracer *trc, ThreadData *td)
 void
 MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
 {
+    /*
+     * Normally, the drainMarkStack phase of marking will never trace outside
+     * of the compartment currently being collected. However, conservative
+     * scanning during drainMarkStack (as is done for generators) can break
+     * this invariant. So we disable the compartment assertions in this
+     * situation.
+     */
+    struct AutoSkipChecking {
+        JSRuntime *runtime;
+        JSCompartment *savedCompartment;
+
+        /* Ctor/dtor names must match the struct name (was AutoSkip: compile error). */
+        AutoSkipChecking(JSRuntime *rt)
+          : runtime(rt), savedCompartment(rt->gcCheckCompartment) {
+            rt->gcCheckCompartment = NULL;
+        }
+        ~AutoSkipChecking() { runtime->gcCheckCompartment = savedCompartment; }
+    } as(trc->context->runtime);
+
     const jsuword *begin = beginv->payloadWord();
-    const jsuword *end = endv->payloadWord();;
+    const jsuword *end = endv->payloadWord();
 #ifdef JS_NUNBOX32
 /*
  * With 64-bit jsvals on 32-bit systems, we can optimize a bit by