Merge mozilla-inbound and my changes

This commit is contained in:
Wan-Teh Chang 2012-04-05 15:49:52 -07:00
Родитель cebb3984e6 eb88843bb2
Коммит 2bd24ee27f
28 изменённых файлов: 401 добавлений и 271 удалений

Просмотреть файл

@ -1475,6 +1475,7 @@ NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(nsGlobalWindow)
cb.NoteNativeChild(timeout, &NS_CYCLE_COLLECTION_NAME(nsTimeout));
}
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_NSCOMPTR(mLocalStorage)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_NSCOMPTR(mSessionStorage)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_NSCOMPTR(mApplicationCache)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_NSCOMPTR(mDocumentPrincipal)
@ -1512,6 +1513,7 @@ NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(nsGlobalWindow)
tmp->mListenerManager->Disconnect();
NS_IMPL_CYCLE_COLLECTION_UNLINK_NSCOMPTR(mListenerManager)
}
NS_IMPL_CYCLE_COLLECTION_UNLINK_NSCOMPTR(mLocalStorage)
NS_IMPL_CYCLE_COLLECTION_UNLINK_NSCOMPTR(mSessionStorage)
NS_IMPL_CYCLE_COLLECTION_UNLINK_NSCOMPTR(mApplicationCache)
NS_IMPL_CYCLE_COLLECTION_UNLINK_NSCOMPTR(mDocumentPrincipal)

Просмотреть файл

@ -915,7 +915,6 @@ static const char js_relimit_option_str[]= JS_OPTIONS_DOT_STR "relimit";
#ifdef JS_GC_ZEAL
static const char js_zeal_option_str[] = JS_OPTIONS_DOT_STR "gczeal";
static const char js_zeal_frequency_str[] = JS_OPTIONS_DOT_STR "gczeal.frequency";
static const char js_zeal_compartment_str[] = JS_OPTIONS_DOT_STR "gczeal.compartment_gc";
#endif
static const char js_methodjit_content_str[] = JS_OPTIONS_DOT_STR "methodjit.content";
static const char js_methodjit_chrome_str[] = JS_OPTIONS_DOT_STR "methodjit.chrome";
@ -1022,9 +1021,8 @@ nsJSContext::JSOptionChangedCallback(const char *pref, void *data)
#ifdef JS_GC_ZEAL
PRInt32 zeal = Preferences::GetInt(js_zeal_option_str, -1);
PRInt32 frequency = Preferences::GetInt(js_zeal_frequency_str, JS_DEFAULT_ZEAL_FREQ);
bool compartment = Preferences::GetBool(js_zeal_compartment_str, false);
if (zeal >= 0)
::JS_SetGCZeal(context->mContext, (PRUint8)zeal, frequency, compartment);
::JS_SetGCZeal(context->mContext, (PRUint8)zeal, frequency);
#endif
return 0;

Просмотреть файл

@ -320,7 +320,7 @@ CreateJSContextForWorker(WorkerPrivate* aWorkerPrivate)
NS_ASSERTION(zeal <= 3, "Bad zeal value!");
PRUint32 frequency = zeal <= 2 ? JS_DEFAULT_ZEAL_FREQ : 1;
JS_SetGCZeal(workerCx, zeal, frequency, false);
JS_SetGCZeal(workerCx, zeal, frequency);
}
#endif

Просмотреть файл

@ -3877,7 +3877,7 @@ WorkerPrivate::UpdateGCZealInternal(JSContext* aCx, PRUint8 aGCZeal)
AssertIsOnWorkerThread();
PRUint32 frequency = aGCZeal <= 2 ? JS_DEFAULT_ZEAL_FREQ : 1;
JS_SetGCZeal(aCx, aGCZeal, frequency, false);
JS_SetGCZeal(aCx, aGCZeal, frequency);
for (PRUint32 index = 0; index < mChildWorkers.Length(); index++) {
mChildWorkers[index]->UpdateGCZeal(aCx, aGCZeal);
@ -3891,6 +3891,7 @@ WorkerPrivate::GarbageCollectInternal(JSContext* aCx, bool aShrinking,
{
AssertIsOnWorkerThread();
js::PrepareForFullGC(JS_GetRuntime(aCx));
if (aShrinking) {
js::ShrinkingGC(aCx, js::gcreason::DOM_WORKER);
}

Просмотреть файл

@ -430,7 +430,7 @@ GCZeal(JSContext *cx,
if (!JS_ValueToECMAUint32(cx, argv[0], &zeal))
return JS_FALSE;
JS_SetGCZeal(cx, PRUint8(zeal), JS_DEFAULT_ZEAL_FREQ, JS_FALSE);
JS_SetGCZeal(cx, PRUint8(zeal), JS_DEFAULT_ZEAL_FREQ);
return JS_TRUE;
}
#endif

Просмотреть файл

@ -21,18 +21,33 @@ using namespace JS;
static JSBool
GC(JSContext *cx, unsigned argc, jsval *vp)
{
JSCompartment *comp = NULL;
/*
* If the first argument is 'compartment', we collect any compartments
* previously scheduled for GC via schedulegc. If the first argument is an
object, we collect the object's compartment (and any other compartments
scheduled for GC). Otherwise, we collect all compartments.
*/
JSBool compartment = false;
if (argc == 1) {
Value arg = vp[2];
if (arg.isObject())
comp = UnwrapObject(&arg.toObject())->compartment();
if (arg.isString()) {
if (!JS_StringEqualsAscii(cx, arg.toString(), "compartment", &compartment))
return false;
} else if (arg.isObject()) {
PrepareCompartmentForGC(UnwrapObject(&arg.toObject())->compartment());
compartment = true;
}
}
#ifndef JS_MORE_DETERMINISTIC
size_t preBytes = cx->runtime->gcBytes;
#endif
JS_CompartmentGC(cx, comp);
if (compartment)
PrepareForDebugGC(cx->runtime);
else
PrepareForFullGC(cx->runtime);
GCForReason(cx, gcreason::API);
char buf[256] = { '\0' };
#ifndef JS_MORE_DETERMINISTIC
@ -154,9 +169,8 @@ static JSBool
GCZeal(JSContext *cx, unsigned argc, jsval *vp)
{
uint32_t zeal, frequency = JS_DEFAULT_ZEAL_FREQ;
JSBool compartment = JS_FALSE;
if (argc > 3) {
if (argc > 2) {
ReportUsageError(cx, &JS_CALLEE(cx, vp).toObject(), "Too many arguments");
return JS_FALSE;
}
@ -165,10 +179,8 @@ GCZeal(JSContext *cx, unsigned argc, jsval *vp)
if (argc >= 2)
if (!JS_ValueToECMAUint32(cx, vp[3], &frequency))
return JS_FALSE;
if (argc >= 3)
compartment = js_ValueToBoolean(vp[3]);
JS_SetGCZeal(cx, (uint8_t)zeal, frequency, compartment);
JS_SetGCZeal(cx, (uint8_t)zeal, frequency);
*vp = JSVAL_VOID;
return JS_TRUE;
}
@ -176,23 +188,45 @@ GCZeal(JSContext *cx, unsigned argc, jsval *vp)
static JSBool
ScheduleGC(JSContext *cx, unsigned argc, jsval *vp)
{
uint32_t count;
bool compartment = false;
if (argc != 1 && argc != 2) {
if (argc != 1) {
ReportUsageError(cx, &JS_CALLEE(cx, vp).toObject(), "Wrong number of arguments");
return JS_FALSE;
}
if (!JS_ValueToECMAUint32(cx, vp[2], &count))
return JS_FALSE;
if (argc == 2)
compartment = js_ValueToBoolean(vp[3]);
JS_ScheduleGC(cx, count, compartment);
Value arg(vp[2]);
if (arg.isInt32()) {
/* Schedule a GC to happen after |arg| allocations. */
JS_ScheduleGC(cx, arg.toInt32());
} else if (arg.isObject()) {
/* Ensure that |comp| is collected during the next GC. */
JSCompartment *comp = UnwrapObject(&arg.toObject())->compartment();
PrepareCompartmentForGC(comp);
} else if (arg.isString()) {
/* This allows us to schedule atomsCompartment for GC. */
PrepareCompartmentForGC(arg.toString()->compartment());
}
*vp = JSVAL_VOID;
return JS_TRUE;
}
static JSBool
SelectForGC(JSContext *cx, unsigned argc, jsval *vp)
{
JSRuntime *rt = cx->runtime;
for (unsigned i = 0; i < argc; i++) {
Value arg(JS_ARGV(cx, vp)[i]);
if (arg.isObject()) {
if (!rt->gcSelectedForMarking.append(&arg.toObject()))
return false;
}
}
*vp = JSVAL_VOID;
return true;
}
static JSBool
VerifyBarriers(JSContext *cx, unsigned argc, jsval *vp)
{
@ -208,17 +242,22 @@ VerifyBarriers(JSContext *cx, unsigned argc, jsval *vp)
static JSBool
GCSlice(JSContext *cx, unsigned argc, jsval *vp)
{
uint32_t budget;
bool limit = true;
uint32_t budget = 0;
if (argc != 1) {
if (argc > 1) {
ReportUsageError(cx, &JS_CALLEE(cx, vp).toObject(), "Wrong number of arguments");
return JS_FALSE;
}
if (!JS_ValueToECMAUint32(cx, vp[2], &budget))
return JS_FALSE;
if (argc == 1) {
if (!JS_ValueToECMAUint32(cx, vp[2], &budget))
return false;
} else {
limit = false;
}
GCDebugSlice(cx, budget);
GCDebugSlice(cx, limit, budget);
*vp = JSVAL_VOID;
return JS_TRUE;
}
@ -475,8 +514,10 @@ Terminate(JSContext *cx, unsigned arg, jsval *vp)
static JSFunctionSpecWithHelp TestingFunctions[] = {
JS_FN_HELP("gc", ::GC, 0, 0,
"gc([obj])",
" Run the garbage collector. When obj is given, GC only its compartment."),
"gc([obj] | 'compartment')",
" Run the garbage collector. When obj is given, GC only its compartment.\n"
" If 'compartment' is given, GC any compartments that were scheduled for\n"
" GC via schedulegc."),
JS_FN_HELP("gcparam", GCParameter, 2, 0,
"gcparam(name [, value])",
@ -502,7 +543,7 @@ static JSFunctionSpecWithHelp TestingFunctions[] = {
#ifdef JS_GC_ZEAL
JS_FN_HELP("gczeal", GCZeal, 2, 0,
"gczeal(level, [period], [compartmentGC?])",
"gczeal(level, [period])",
" Specifies how zealous the garbage collector should be. Values for level:\n"
" 0: Normal amount of collection\n"
" 1: Collect when roots are added or removed\n"
@ -510,12 +551,16 @@ static JSFunctionSpecWithHelp TestingFunctions[] = {
" 3: Collect when the window paints (browser only)\n"
" 4: Verify write barriers between instructions\n"
" 5: Verify write barriers between paints\n"
" Period specifies that collection happens every n allocations.\n"
" If compartmentGC is true, the collections will be compartmental."),
" Period specifies that collection happens every n allocations.\n"),
JS_FN_HELP("schedulegc", ScheduleGC, 1, 0,
"schedulegc(num, [compartmentGC?])",
" Schedule a GC to happen after num allocations."),
"schedulegc(num | obj)",
" If num is given, schedule a GC after num allocations.\n"
" If obj is given, schedule a GC of obj's compartment."),
JS_FN_HELP("selectforgc", SelectForGC, 0, 0,
"selectforgc(obj1, obj2, ...)",
" Schedule the given objects to be marked in the next GC slice."),
JS_FN_HELP("verifybarriers", VerifyBarriers, 0, 0,
"verifybarriers()",

Просмотреть файл

@ -328,7 +328,8 @@ Statistics::formatData(StatisticsSerializer &ss, uint64_t timestamp)
if (ss.isJSON())
ss.appendNumber("Timestamp", "%llu", "", (unsigned long long)timestamp);
ss.appendNumber("Total Time", "%.1f", "ms", t(total));
ss.appendString("Type", wasFullGC ? "global" : "compartment");
ss.appendNumber("Compartments Collected", "%d", "", collectedCount);
ss.appendNumber("Total Compartments", "%d", "", compartmentCount);
ss.appendNumber("MMU (20ms)", "%d", "%", int(mmu20 * 100));
ss.appendNumber("MMU (50ms)", "%d", "%", int(mmu50 * 100));
if (slices.length() > 1 || ss.isJSON())
@ -398,7 +399,8 @@ Statistics::Statistics(JSRuntime *rt)
startupTime(PRMJ_Now()),
fp(NULL),
fullFormat(false),
wasFullGC(false),
collectedCount(0),
compartmentCount(0),
nonincrementalReason(NULL)
{
PodArrayZero(phaseTotals);
@ -492,7 +494,7 @@ Statistics::endGC()
phaseTotals[i] += phaseTimes[i];
if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback) {
(*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, wasFullGC ? 0 : 1);
(*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, collectedCount == compartmentCount ? 0 : 1);
(*cb)(JS_TELEMETRY_GC_MS, t(gcDuration()));
(*cb)(JS_TELEMETRY_GC_MARK_MS, t(phaseTimes[PHASE_MARK]));
(*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[PHASE_SWEEP]));
@ -508,9 +510,10 @@ Statistics::endGC()
}
void
Statistics::beginSlice(bool full, gcreason::Reason reason)
Statistics::beginSlice(int collectedCount, int compartmentCount, gcreason::Reason reason)
{
wasFullGC = full;
collectedCount = collectedCount;
compartmentCount = compartmentCount;
bool first = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
if (first)
@ -522,6 +525,7 @@ Statistics::beginSlice(bool full, gcreason::Reason reason)
if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
(*cb)(JS_TELEMETRY_GC_REASON, reason);
bool wasFullGC = collectedCount == compartmentCount;
if (GCSliceCallback cb = runtime->gcSliceCallback)
(*cb)(runtime, first ? GC_CYCLE_BEGIN : GC_SLICE_BEGIN, GCDescription(!wasFullGC));
}
@ -540,6 +544,7 @@ Statistics::endSlice()
if (last)
endGC();
bool wasFullGC = collectedCount == compartmentCount;
if (GCSliceCallback cb = runtime->gcSliceCallback) {
if (last)
(*cb)(runtime, GC_CYCLE_END, GCDescription(!wasFullGC));

Просмотреть файл

@ -94,7 +94,7 @@ struct Statistics {
void beginPhase(Phase phase);
void endPhase(Phase phase);
void beginSlice(bool full, gcreason::Reason reason);
void beginSlice(int collectedCount, int compartmentCount, gcreason::Reason reason);
void endSlice();
void reset(const char *reason) { slices.back().resetReason = reason; }
@ -116,7 +116,8 @@ struct Statistics {
FILE *fp;
bool fullFormat;
bool wasFullGC;
int collectedCount;
int compartmentCount;
const char *nonincrementalReason;
struct SliceData {
@ -162,9 +163,13 @@ struct Statistics {
};
struct AutoGCSlice {
AutoGCSlice(Statistics &stats, bool full, gcreason::Reason reason
AutoGCSlice(Statistics &stats, int collectedCount, int compartmentCount, gcreason::Reason reason
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: stats(stats) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginSlice(full, reason); }
: stats(stats)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
stats.beginSlice(collectedCount, compartmentCount, reason);
}
~AutoGCSlice() { stats.endSlice(); }
Statistics &stats;

Просмотреть файл

@ -11,3 +11,11 @@ if (!("gczeal" in this)) {
if (!("schedulegc" in this)) {
schedulegc = function() { }
}
if (!("gcslice" in this)) {
gcslice = function() { }
}
if (!("selectforgc" in this)) {
selectforgc = function() { }
}

Просмотреть файл

@ -0,0 +1,31 @@
var objs;
function init()
{
objs = new Object();
var x = new Object();
objs.root1 = x;
objs.root2 = new Object();
x.ptr = new Object();
x = null;
/*
* Clears out the arena lists. Otherwise all the objects above
* would be considered to be created during the incremental GC.
*/
gc();
}
/*
* Use eval here so that the interpreter frames end up higher on the
* stack, which avoids them being seen later on by the conservative
* scanner.
*/
eval("init()");
gcslice(0);
selectforgc(objs.root2);
gcslice(1);
objs.root2.ptr = objs.root1.ptr;
objs.root1.ptr = null;
gcslice();

Просмотреть файл

@ -0,0 +1,9 @@
/* Make sure we don't collect the atoms compartment unless every compartment is marked. */
var g = newGlobal('new-compartment');
g.eval("var x = 'some-atom';");
schedulegc(this);
schedulegc('atoms');
gc('compartment');
print(g.x);

Просмотреть файл

@ -0,0 +1,10 @@
/* Exercise the path where we want to collect a new compartment in the middle of incremental GC. */
var g1 = newGlobal('new-compartment');
var g2 = newGlobal('new-compartment');
schedulegc(g1);
gcslice(0);
schedulegc(g2);
gcslice(1);
gcslice();

Просмотреть файл

@ -0,0 +1,11 @@
/* Exercise the path where we want to collect a new compartment in the middle of incremental GC. */
var g1 = newGlobal('new-compartment');
var g2 = newGlobal('new-compartment');
schedulegc(g1);
schedulegc(g2);
gcslice(0);
schedulegc(g1);
gcslice(1);
gcslice();

Просмотреть файл

@ -730,18 +730,14 @@ JSRuntime::JSRuntime()
gcJitReleaseTime(0),
gcMode(JSGC_MODE_GLOBAL),
gcIsNeeded(0),
gcFullIsNeeded(0),
gcWeakMapList(NULL),
gcStats(thisFromCtor()),
gcNumber(0),
gcStartNumber(0),
gcTriggerReason(gcreason::NO_REASON),
gcIsFull(false),
gcStrictCompartmentChecking(false),
gcIncrementalState(gc::NO_INCREMENTAL),
gcCompartmentCreated(false),
gcLastMarkSlice(false),
gcIncrementalIsFull(false),
gcInterFrameGC(0),
gcSliceBudget(SliceBudget::Unlimited),
gcIncrementalEnabled(true),
@ -751,7 +747,6 @@ JSRuntime::JSRuntime()
gcZeal_(0),
gcZealFrequency(0),
gcNextScheduled(0),
gcDebugCompartmentGC(false),
gcDeterministicOnly(false),
#endif
gcCallback(NULL),
@ -2864,26 +2859,12 @@ JS_IsGCMarkingTracer(JSTracer *trc)
return IS_GC_MARKING_TRACER(trc);
}
extern JS_PUBLIC_API(void)
JS_CompartmentGC(JSContext *cx, JSCompartment *comp)
{
AssertNoGC(cx);
/* We cannot GC the atoms compartment alone; use a full GC instead. */
JS_ASSERT(comp != cx->runtime->atomsCompartment);
if (comp) {
PrepareCompartmentForGC(comp);
GC(cx, false, GC_NORMAL, gcreason::API);
} else {
GC(cx, true, GC_NORMAL, gcreason::API);
}
}
JS_PUBLIC_API(void)
JS_GC(JSContext *cx)
{
JS_CompartmentGC(cx, NULL);
AssertNoGC(cx);
PrepareForFullGC(cx->runtime);
GC(cx, GC_NORMAL, gcreason::API);
}
JS_PUBLIC_API(void)
@ -5248,15 +5229,33 @@ JS_DecompileFunctionBody(JSContext *cx, JSFunction *fun, unsigned indent)
}
JS_PUBLIC_API(JSBool)
JS_ExecuteScript(JSContext *cx, JSObject *obj, JSScript *script, jsval *rval)
JS_ExecuteScript(JSContext *cx, JSObject *obj, JSScript *scriptArg, jsval *rval)
{
JS_THREADSAFE_ASSERT(cx->compartment != cx->runtime->atomsCompartment);
AssertNoGC(cx);
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj, script);
assertSameCompartment(cx, obj);
AutoLastFrameCheck lfc(cx);
return Execute(cx, script, *obj, rval);
JS::Anchor<JSScript *> script;
/*
* Mozilla caches pre-compiled scripts (e.g., in the XUL prototype cache)
* and runs them against multiple globals. With a compartment per global,
* this requires cloning the pre-compiled script into each new global.
* Since each script gets run once, there is no point in trying to cache
* this clone. Ideally, this would be handled at some pinch point in
* mozilla, but there doesn't seem to be one, so we handle it here.
*/
if (scriptArg->compartment() != obj->compartment()) {
script = CloneScript(cx, scriptArg);
if (!script.get())
return false;
} else {
script = scriptArg;
}
return Execute(cx, script.get(), *obj, rval);
}
JS_PUBLIC_API(JSBool)
@ -6550,14 +6549,13 @@ JS_AbortIfWrongThread(JSRuntime *rt)
#ifdef JS_GC_ZEAL
JS_PUBLIC_API(void)
JS_SetGCZeal(JSContext *cx, uint8_t zeal, uint32_t frequency, JSBool compartment)
JS_SetGCZeal(JSContext *cx, uint8_t zeal, uint32_t frequency)
{
#ifdef JS_GC_ZEAL
const char *env = getenv("JS_GC_ZEAL");
if (env) {
zeal = atoi(env);
frequency = 1;
compartment = false;
}
#endif
@ -6565,14 +6563,12 @@ JS_SetGCZeal(JSContext *cx, uint8_t zeal, uint32_t frequency, JSBool compartment
cx->runtime->gcZeal_ = zeal;
cx->runtime->gcZealFrequency = frequency;
cx->runtime->gcNextScheduled = schedule ? frequency : 0;
cx->runtime->gcDebugCompartmentGC = !!compartment;
}
JS_PUBLIC_API(void)
JS_ScheduleGC(JSContext *cx, uint32_t count, JSBool compartment)
JS_ScheduleGC(JSContext *cx, uint32_t count)
{
cx->runtime->gcNextScheduled = count;
cx->runtime->gcDebugCompartmentGC = !!compartment;
}
#endif

Просмотреть файл

@ -143,6 +143,8 @@ template<> class AnchorPermitted<const JSFunction *> { };
template<> class AnchorPermitted<JSString *> { };
template<> class AnchorPermitted<const JSString *> { };
template<> class AnchorPermitted<Value> { };
template<> class AnchorPermitted<const JSScript *> { };
template<> class AnchorPermitted<JSScript *> { };
template<typename T>
class Anchor: AnchorPermitted<T>
@ -154,12 +156,12 @@ class Anchor: AnchorPermitted<T>
T &get() { return hold; }
const T &get() const { return hold; }
void set(const T &t) { hold = t; }
void operator=(const T &t) { hold = t; }
void clear() { hold = 0; }
private:
T hold;
/* Anchors should not be assigned or passed to functions. */
Anchor(const Anchor &);
const Anchor &operator=(const Anchor &);
Anchor(const Anchor &) MOZ_DELETE;
const Anchor &operator=(const Anchor &) MOZ_DELETE;
};
#ifdef __GNUC__
@ -5529,10 +5531,10 @@ JS_NewObjectForConstructor(JSContext *cx, JSClass *clasp, const jsval *vp);
#define JS_DEFAULT_ZEAL_FREQ 100
extern JS_PUBLIC_API(void)
JS_SetGCZeal(JSContext *cx, uint8_t zeal, uint32_t frequency, JSBool compartment);
JS_SetGCZeal(JSContext *cx, uint8_t zeal, uint32_t frequency);
extern JS_PUBLIC_API(void)
JS_ScheduleGC(JSContext *cx, uint32_t count, JSBool compartment);
JS_ScheduleGC(JSContext *cx, uint32_t count);
#endif
/*

Просмотреть файл

@ -224,7 +224,7 @@ js_FinishAtomState(JSRuntime *rt)
}
bool
js_InitCommonAtoms(JSContext *cx)
js::InitCommonAtoms(JSContext *cx)
{
JSAtomState *state = &cx->runtime->atomState;
JSAtom **atoms = state->commonAtomsStart();
@ -242,19 +242,19 @@ js_InitCommonAtoms(JSContext *cx)
}
void
js_FinishCommonAtoms(JSContext *cx)
js::FinishCommonAtoms(JSRuntime *rt)
{
cx->runtime->emptyString = NULL;
cx->runtime->atomState.junkAtoms();
rt->emptyString = NULL;
rt->atomState.junkAtoms();
}
void
js_TraceAtomState(JSTracer *trc)
js::MarkAtomState(JSTracer *trc, bool markAll)
{
JSRuntime *rt = trc->runtime;
JSAtomState *state = &rt->atomState;
if (rt->gcKeepAtoms) {
if (markAll) {
for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront()) {
JSAtom *tmp = r.front().asPtr();
MarkStringRoot(trc, &tmp, "locked_atom");
@ -274,7 +274,7 @@ js_TraceAtomState(JSTracer *trc)
}
void
js_SweepAtomState(JSRuntime *rt)
js::SweepAtomState(JSRuntime *rt)
{
JSAtomState *state = &rt->atomState;

Просмотреть файл

@ -408,19 +408,19 @@ js_FinishAtomState(JSRuntime *rt);
* Atom tracing and garbage collection hooks.
*/
extern void
js_TraceAtomState(JSTracer *trc);
namespace js {
extern void
js_SweepAtomState(JSRuntime *rt);
MarkAtomState(JSTracer *trc, bool markAll);
extern void
SweepAtomState(JSRuntime *rt);
extern bool
js_InitCommonAtoms(JSContext *cx);
InitCommonAtoms(JSContext *cx);
extern void
js_FinishCommonAtoms(JSContext *cx);
namespace js {
FinishCommonAtoms(JSRuntime *rt);
/* N.B. must correspond to boolean tagging behavior. */
enum InternBehavior

Просмотреть файл

@ -207,7 +207,7 @@ js_NewContext(JSRuntime *rt, size_t stackChunkSize)
#endif
bool ok = rt->staticStrings.init(cx);
if (ok)
ok = js_InitCommonAtoms(cx);
ok = InitCommonAtoms(cx);
#ifdef JS_THREADSAFE
JS_EndRequest(cx);
@ -270,17 +270,19 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
c->types.print(cx, false);
/* Unpin all common atoms before final GC. */
js_FinishCommonAtoms(cx);
FinishCommonAtoms(cx->runtime);
/* Clear debugging state to remove GC roots. */
for (CompartmentsIter c(rt); !c.done(); c.next())
c->clearTraps(cx);
JS_ClearAllWatchPoints(cx);
GC(cx, true, GC_NORMAL, gcreason::LAST_CONTEXT);
PrepareForFullGC(rt);
GC(cx, GC_NORMAL, gcreason::LAST_CONTEXT);
} else if (mode == JSDCM_FORCE_GC) {
JS_ASSERT(!rt->gcRunning);
GC(cx, true, GC_NORMAL, gcreason::DESTROY_CONTEXT);
PrepareForFullGC(rt);
GC(cx, GC_NORMAL, gcreason::DESTROY_CONTEXT);
} else if (mode == JSDCM_MAYBE_GC) {
JS_ASSERT(!rt->gcRunning);
JS_MaybeGC(cx);
@ -883,7 +885,7 @@ js_InvokeOperationCallback(JSContext *cx)
JS_ATOMIC_SET(&rt->interrupt, 0);
if (rt->gcIsNeeded)
GCSlice(cx, rt->gcFullIsNeeded, GC_NORMAL, rt->gcTriggerReason);
GCSlice(cx, GC_NORMAL, rt->gcTriggerReason);
#ifdef JS_THREADSAFE
/*

Просмотреть файл

@ -352,7 +352,6 @@ struct JSRuntime : js::RuntimeFriendFields
* full GC.
*/
volatile uintptr_t gcIsNeeded;
volatile uintptr_t gcFullIsNeeded;
js::WeakMapBase *gcWeakMapList;
js::gcstats::Statistics gcStats;
@ -366,9 +365,6 @@ struct JSRuntime : js::RuntimeFriendFields
/* The reason that an interrupt-triggered GC should be called. */
js::gcreason::Reason gcTriggerReason;
/* Is the currently running GC a full GC or a compartmental GC? */
bool gcIsFull;
/*
* If this is true, all marked objects must belong to a compartment being
* GCed. This is used to look for compartment bugs.
@ -381,15 +377,9 @@ struct JSRuntime : js::RuntimeFriendFields
*/
js::gc::State gcIncrementalState;
/* Indicates that a new compartment was created during incremental GC. */
bool gcCompartmentCreated;
/* Indicates that the last incremental slice exhausted the mark stack. */
bool gcLastMarkSlice;
/* Is there a full incremental GC in progress. */
bool gcIncrementalIsFull;
/*
* Indicates that a GC slice has taken place in the middle of an animation
* frame, rather than at the beginning. In this case, the next slice will be
@ -455,9 +445,10 @@ struct JSRuntime : js::RuntimeFriendFields
int gcZeal_;
int gcZealFrequency;
int gcNextScheduled;
bool gcDebugCompartmentGC;
bool gcDeterministicOnly;
js::Vector<JSObject *, 0, js::SystemAllocPolicy> gcSelectedForMarking;
int gcZeal() { return gcZeal_; }
bool needZealousGC() {

Просмотреть файл

@ -132,31 +132,34 @@ JS_NewObjectWithUniqueType(JSContext *cx, JSClass *clasp, JSObject *proto, JSObj
}
JS_FRIEND_API(void)
js::GCForReason(JSContext *cx, gcreason::Reason reason)
js::PrepareCompartmentForGC(JSCompartment *comp)
{
GC(cx, true, GC_NORMAL, reason);
comp->scheduleGC();
}
JS_FRIEND_API(void)
js::CompartmentGCForReason(JSContext *cx, JSCompartment *comp, gcreason::Reason reason)
js::PrepareForFullGC(JSRuntime *rt)
{
/* We cannot GC the atoms compartment alone; use a full GC instead. */
JS_ASSERT(comp != cx->runtime->atomsCompartment);
for (CompartmentsIter c(rt); !c.done(); c.next())
c->scheduleGC();
}
PrepareCompartmentForGC(comp);
GC(cx, false, GC_NORMAL, reason);
JS_FRIEND_API(void)
js::GCForReason(JSContext *cx, gcreason::Reason reason)
{
GC(cx, GC_NORMAL, reason);
}
JS_FRIEND_API(void)
js::ShrinkingGC(JSContext *cx, gcreason::Reason reason)
{
GC(cx, true, GC_SHRINK, reason);
GC(cx, GC_SHRINK, reason);
}
JS_FRIEND_API(void)
js::IncrementalGC(JSContext *cx, gcreason::Reason reason)
{
GCSlice(cx, true, GC_NORMAL, reason);
GCSlice(cx, GC_NORMAL, reason);
}
JS_FRIEND_API(void)
@ -753,7 +756,8 @@ NotifyDidPaint(JSContext *cx)
}
if (rt->gcZeal() == gc::ZealFrameGCValue) {
GCSlice(cx, true, GC_NORMAL, gcreason::REFRESH_FRAME);
PrepareForFullGC(rt);
GCSlice(cx, GC_NORMAL, gcreason::REFRESH_FRAME);
return;
}
@ -762,7 +766,7 @@ NotifyDidPaint(JSContext *cx)
if (c->needsBarrier())
PrepareCompartmentForGC(c);
}
GCSlice(cx, rt->gcIncrementalIsFull, GC_NORMAL, gcreason::REFRESH_FRAME);
GCSlice(cx, GC_NORMAL, gcreason::REFRESH_FRAME);
}
rt->gcInterFrameGC = false;

Просмотреть файл

@ -667,10 +667,20 @@ enum Reason {
} /* namespace gcreason */
extern JS_FRIEND_API(void)
GCForReason(JSContext *cx, gcreason::Reason reason);
PrepareCompartmentForGC(JSCompartment *comp);
extern JS_FRIEND_API(void)
CompartmentGCForReason(JSContext *cx, JSCompartment *comp, gcreason::Reason reason);
PrepareForFullGC(JSRuntime *rt);
/*
* When triggering a GC using one of the functions below, it is first necessary
* to select the compartments to be collected. To do this, you can call
* PrepareCompartmentForGC on each compartment, or you can call PrepareForFullGC
* to select all compartments. Failing to select any compartment is an error.
*/
extern JS_FRIEND_API(void)
GCForReason(JSContext *cx, gcreason::Reason reason);
extern JS_FRIEND_API(void)
ShrinkingGC(JSContext *cx, gcreason::Reason reason);

Просмотреть файл

@ -1642,13 +1642,13 @@ ArenaLists::finalizeScripts(FreeOp *fop)
}
static void
RunLastDitchGC(JSContext *cx, gcreason::Reason reason, bool full)
RunLastDitchGC(JSContext *cx, gcreason::Reason reason)
{
JSRuntime *rt = cx->runtime;
/* The last ditch GC preserves all atoms. */
AutoKeepAtoms keep(rt);
GC(cx, full, GC_NORMAL, reason);
GC(cx, GC_NORMAL, reason);
}
/* static */ void *
@ -1664,7 +1664,7 @@ ArenaLists::refillFreeList(JSContext *cx, AllocKind thingKind)
for (;;) {
if (JS_UNLIKELY(runGC)) {
PrepareCompartmentForGC(comp);
RunLastDitchGC(cx, gcreason::LAST_DITCH, rt->gcFullIsNeeded);
RunLastDitchGC(cx, gcreason::LAST_DITCH);
/*
* The JSGC_END callback can legitimately allocate new GC
@ -1768,7 +1768,8 @@ SliceBudget::TimeBudget(int64_t millis)
/* static */ int64_t
SliceBudget::WorkBudget(int64_t work)
{
return -work;
/* For work = 0 not to mean Unlimited, we subtract 1. */
return -work - 1;
}
SliceBudget::SliceBudget()
@ -1787,7 +1788,7 @@ SliceBudget::SliceBudget(int64_t budget)
counter = CounterReset;
} else {
deadline = 0;
counter = -budget;
counter = -budget - 1;
}
}
@ -1956,7 +1957,8 @@ GCMarker::markDelayedChildren(SliceBudget &budget)
markLaterArenas--;
markDelayedChildren(aheader);
if (budget.checkOverBudget())
budget.step(150);
if (budget.isOverBudget())
return false;
} while (unmarkedArenaStackTop);
JS_ASSERT(!markLaterArenas);
@ -2240,7 +2242,7 @@ MarkRuntime(JSTracer *trc, bool useSavedRoots = false)
JSRuntime *rt = trc->runtime;
JS_ASSERT(trc->callback != GCMarker::GrayCallback);
if (IS_GC_MARKING_TRACER(trc) && !rt->gcIsFull) {
if (IS_GC_MARKING_TRACER(trc)) {
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (!c->isCollecting())
c->markCrossCompartmentWrappers(trc);
@ -2265,7 +2267,20 @@ MarkRuntime(JSTracer *trc, bool useSavedRoots = false)
MarkScriptRoot(trc, &vec[i].script, "scriptAndCountsVector");
}
js_TraceAtomState(trc);
/*
* Atoms are not in the cross-compartment map. So if there are any
* compartments that are not being collected, we are not allowed to collect
* atoms. Otherwise, the non-collected compartments could contain pointers
* to atoms that we would miss.
*/
bool isFullGC = true;
if (IS_GC_MARKING_TRACER(trc)) {
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (!c->isCollecting())
isFullGC = false;
}
}
MarkAtomState(trc, rt->gcKeepAtoms || !isFullGC);
rt->staticStrings.trace(trc);
for (ContextIter acx(rt); !acx.done(); acx.next())
@ -2322,26 +2337,37 @@ MarkRuntime(JSTracer *trc, bool useSavedRoots = false)
}
}
static void
TriggerOperationCallback(JSRuntime *rt, gcreason::Reason reason)
{
if (rt->gcIsNeeded)
return;
rt->gcIsNeeded = true;
rt->gcTriggerReason = reason;
rt->triggerOperationCallback();
}
void
TriggerGC(JSRuntime *rt, gcreason::Reason reason)
{
JS_ASSERT(rt->onOwnerThread());
if (rt->gcRunning || rt->gcIsNeeded)
if (rt->gcRunning)
return;
/* Trigger the GC when it is safe to call an operation callback. */
rt->gcIsNeeded = true;
rt->gcFullIsNeeded = true;
rt->gcTriggerReason = reason;
rt->triggerOperationCallback();
PrepareForFullGC(rt);
TriggerOperationCallback(rt, reason);
}
void
TriggerCompartmentGC(JSCompartment *comp, gcreason::Reason reason)
{
JSRuntime *rt = comp->rt;
JS_ASSERT(!rt->gcRunning);
JS_ASSERT(rt->onOwnerThread());
if (rt->gcRunning)
return;
if (rt->gcZeal() == ZealAllocValue) {
TriggerGC(rt, reason);
@ -2355,17 +2381,7 @@ TriggerCompartmentGC(JSCompartment *comp, gcreason::Reason reason)
}
PrepareCompartmentForGC(comp);
if (rt->gcIsNeeded)
return;
/*
* Trigger the GC when it is safe to call an operation callback on any
* thread.
*/
rt->gcIsNeeded = true;
rt->gcTriggerReason = reason;
rt->triggerOperationCallback();
TriggerOperationCallback(rt, reason);
}
void
@ -2375,13 +2391,14 @@ MaybeGC(JSContext *cx)
JS_ASSERT(rt->onOwnerThread());
if (rt->gcZeal() == ZealAllocValue || rt->gcZeal() == ZealPokeValue) {
GC(cx, true, GC_NORMAL, gcreason::MAYBEGC);
PrepareForFullGC(rt);
GC(cx, GC_NORMAL, gcreason::MAYBEGC);
return;
}
JSCompartment *comp = cx->compartment;
if (rt->gcIsNeeded) {
GCSlice(cx, rt->gcFullIsNeeded, GC_NORMAL, gcreason::MAYBEGC);
GCSlice(cx, GC_NORMAL, gcreason::MAYBEGC);
return;
}
@ -2390,13 +2407,13 @@ MaybeGC(JSContext *cx)
rt->gcIncrementalState == NO_INCREMENTAL)
{
PrepareCompartmentForGC(comp);
GCSlice(cx, false, GC_NORMAL, gcreason::MAYBEGC);
GCSlice(cx, GC_NORMAL, gcreason::MAYBEGC);
return;
}
if (comp->gcMallocAndFreeBytes > comp->gcTriggerMallocAndFreeBytes) {
PrepareCompartmentForGC(comp);
GCSlice(cx, false, GC_NORMAL, gcreason::MAYBEGC);
GCSlice(cx, GC_NORMAL, gcreason::MAYBEGC);
return;
}
@ -2410,7 +2427,8 @@ MaybeGC(JSContext *cx)
if (rt->gcChunkAllocationSinceLastGC ||
rt->gcNumArenasFreeCommitted > FreeCommittedArenasThreshold)
{
GCSlice(cx, true, GC_SHRINK, gcreason::MAYBEGC);
PrepareForFullGC(rt);
GCSlice(cx, GC_SHRINK, gcreason::MAYBEGC);
} else {
rt->gcNextFullGCTime = now + GC_IDLE_FULL_SPAN;
}
@ -2808,12 +2826,6 @@ GCHelperThread::doSweep()
#endif /* JS_THREADSAFE */
void
PrepareCompartmentForGC(JSCompartment *comp)
{
comp->scheduleGC();
}
} /* namespace js */
static bool
@ -2845,7 +2857,7 @@ SweepCompartments(FreeOp *fop, JSGCInvocationKind gckind)
while (read < end) {
JSCompartment *compartment = *read++;
if (!compartment->hold &&
if (!compartment->hold && compartment->isCollecting() &&
(compartment->arenas.arenaListsAreEmpty() || !rt->hasContexts()))
{
compartment->arenas.checkEmptyFreeLists();
@ -2978,11 +2990,9 @@ EndMarkPhase(JSContext *cx)
#ifdef DEBUG
/* Make sure that we didn't mark an object in another compartment */
if (!rt->gcIsFull) {
for (CompartmentsIter c(rt); !c.done(); c.next()) {
JS_ASSERT_IF(!c->isCollecting() && c != rt->atomsCompartment,
c->arenas.checkArenaListAllUnmarked());
}
for (CompartmentsIter c(rt); !c.done(); c.next()) {
JS_ASSERT_IF(!c->isCollecting() && c != rt->atomsCompartment,
c->arenas.checkArenaListAllUnmarked());
}
#endif
}
@ -3065,10 +3075,6 @@ ValidateIncrementalMarking(JSContext *cx)
uintptr_t end = arena->thingsEnd();
while (thing < end) {
Cell *cell = (Cell *)thing;
if (bitmap->isMarked(cell, BLACK) && !incBitmap.isMarked(cell, BLACK)) {
JS_DumpHeap(rt, stdout, NULL, JSGCTraceKind(0), NULL, 100000, NULL);
printf("Assertion cell: %p (%d)\n", (void *)cell, cell->getAllocKind());
}
JS_ASSERT_IF(bitmap->isMarked(cell, BLACK), incBitmap.isMarked(cell, BLACK));
thing += Arena::thingSize(kind);
}
@ -3129,7 +3135,7 @@ SweepPhase(JSContext *cx, JSGCInvocationKind gckind)
/* Finalize unreachable (key,value) pairs in all weak maps. */
WeakMapBase::sweepAll(&rt->gcMarker);
js_SweepAtomState(rt);
SweepAtomState(rt);
/* Collect watch points associated with unreachable objects. */
WatchpointMap::sweepAll(rt);
@ -3140,7 +3146,7 @@ SweepPhase(JSContext *cx, JSGCInvocationKind gckind)
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_COMPARTMENTS);
bool releaseTypes = rt->gcIsFull && ReleaseObservedTypes(rt);
bool releaseTypes = ReleaseObservedTypes(rt);
for (GCCompartmentsIter c(rt); !c.done(); c.next())
c->sweep(&fop, releaseTypes);
}
@ -3194,8 +3200,7 @@ SweepPhase(JSContext *cx, JSGCInvocationKind gckind)
* This removes compartments from rt->compartment, so we do it last to make
* sure we don't miss sweeping any compartments.
*/
if (rt->gcIsFull)
SweepCompartments(&fop, gckind);
SweepCompartments(&fop, gckind);
#ifndef JS_THREADSAFE
/*
@ -3260,7 +3265,7 @@ class AutoHeapSession {
/* ...while this class is to be used only for garbage collection. */
class AutoGCSession : AutoHeapSession {
public:
explicit AutoGCSession(JSRuntime *rt, bool full);
explicit AutoGCSession(JSRuntime *rt);
~AutoGCSession();
};
@ -3279,13 +3284,12 @@ AutoHeapSession::~AutoHeapSession()
runtime->gcRunning = false;
}
AutoGCSession::AutoGCSession(JSRuntime *rt, bool full)
AutoGCSession::AutoGCSession(JSRuntime *rt)
: AutoHeapSession(rt)
{
rt->gcIsFull = full;
DebugOnly<bool> any = false;
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (full || c->isGCScheduled()) {
if (c->isGCScheduled()) {
c->setCollecting(true);
any = true;
}
@ -3293,7 +3297,6 @@ AutoGCSession::AutoGCSession(JSRuntime *rt, bool full)
JS_ASSERT(any);
runtime->gcIsNeeded = false;
runtime->gcFullIsNeeded = false;
runtime->gcInterFrameGC = true;
runtime->gcNumber++;
@ -3307,12 +3310,16 @@ AutoGCSession::AutoGCSession(JSRuntime *rt, bool full)
AutoGCSession::~AutoGCSession()
{
runtime->gcIsFull = false;
for (GCCompartmentsIter c(runtime); !c.done(); c.next())
c->setCollecting(false);
runtime->gcNextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN;
runtime->gcChunkAllocationSinceLastGC = false;
#ifdef JS_GC_ZEAL
/* Keeping these around after a GC is dangerous. */
runtime->gcSelectedForMarking.clearAndFree();
#endif
}
static void
@ -3324,7 +3331,6 @@ ResetIncrementalGC(JSRuntime *rt, const char *reason)
for (CompartmentsIter c(rt); !c.done(); c.next())
c->needsBarrier_ = false;
rt->gcIncrementalIsFull = false;
rt->gcMarker.reset();
rt->gcMarker.stop();
rt->gcIncrementalState = NO_INCREMENTAL;
@ -3375,7 +3381,6 @@ AutoGCSlice::~AutoGCSlice()
c->arenas.prepareForIncrementalGC(rt);
} else {
JS_ASSERT(rt->gcIncrementalState == NO_INCREMENTAL);
c->needsBarrier_ = false;
}
}
@ -3408,8 +3413,6 @@ IncrementalGCSlice(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
gc::State initialState = rt->gcIncrementalState;
if (rt->gcIncrementalState == NO_INCREMENTAL) {
JS_ASSERT(!rt->gcIncrementalIsFull);
rt->gcIncrementalIsFull = rt->gcIsFull;
rt->gcIncrementalState = MARK_ROOTS;
rt->gcLastMarkSlice = false;
}
@ -3436,11 +3439,21 @@ IncrementalGCSlice(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
if (!rt->gcMarker.hasBufferedGrayRoots())
sliceBudget.reset();
#ifdef JS_GC_ZEAL
if (!rt->gcSelectedForMarking.empty()) {
for (JSObject **obj = rt->gcSelectedForMarking.begin();
obj != rt->gcSelectedForMarking.end(); obj++)
{
MarkObjectUnbarriered(&rt->gcMarker, obj, "selected obj");
}
}
#endif
bool finished = rt->gcMarker.drainMarkStack(sliceBudget);
if (finished) {
JS_ASSERT(rt->gcMarker.isDrained());
if (initialState == MARK && !rt->gcLastMarkSlice)
if (initialState == MARK && !rt->gcLastMarkSlice && budget != SliceBudget::Unlimited)
rt->gcLastMarkSlice = true;
else
rt->gcIncrementalState = SWEEP;
@ -3455,8 +3468,6 @@ IncrementalGCSlice(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
/* JIT code was already discarded during sweeping. */
rt->gcIncrementalIsFull = false;
rt->gcIncrementalState = NO_INCREMENTAL;
}
}
@ -3487,11 +3498,6 @@ class IncrementalSafety
static IncrementalSafety
IsIncrementalGCSafe(JSRuntime *rt)
{
if (rt->gcCompartmentCreated) {
rt->gcCompartmentCreated = false;
return IncrementalSafety::Unsafe("compartment created");
}
if (rt->gcKeepAtoms)
return IncrementalSafety::Unsafe("gcKeepAtoms set");
@ -3548,22 +3554,24 @@ BudgetIncrementalGC(JSRuntime *rt, int64_t *budget)
/*
* GC, repeatedly if necessary, until we think we have not created any new
* garbage. We disable inlining to ensure that the bottom of the stack with
* possible GC roots recorded in js_GC excludes any pointers we use during the
* marking implementation.
* possible GC roots recorded in MarkRuntime excludes any pointers we use during
* the marking implementation.
*/
static JS_NEVER_INLINE void
GCCycle(JSContext *cx, bool full, int64_t budget, JSGCInvocationKind gckind)
GCCycle(JSContext *cx, bool incremental, int64_t budget, JSGCInvocationKind gckind)
{
JSRuntime *rt = cx->runtime;
JS_ASSERT_IF(!full, !rt->atomsCompartment->isCollecting());
JS_ASSERT_IF(!full, rt->gcMode != JSGC_MODE_GLOBAL);
#ifdef DEBUG
for (CompartmentsIter c(rt); !c.done(); c.next())
JS_ASSERT_IF(rt->gcMode == JSGC_MODE_GLOBAL, c->isGCScheduled());
#endif
/* Recursive GC is no-op. */
if (rt->gcRunning)
return;
AutoGCSession gcsession(rt, full);
AutoGCSession gcsession(rt);
/* Don't GC if we are reporting an OOM. */
if (rt->inOOMReport)
@ -3584,7 +3592,7 @@ GCCycle(JSContext *cx, bool full, int64_t budget, JSGCInvocationKind gckind)
}
#endif
if (budget == SliceBudget::Unlimited) {
if (!incremental) {
/* If non-incremental GC was requested, reset incremental GC. */
ResetIncrementalGC(rt, "requested");
rt->gcStats.nonincremental("requested");
@ -3631,7 +3639,7 @@ IsDeterministicGCReason(gcreason::Reason reason)
#endif
static void
Collect(JSContext *cx, bool full, int64_t budget,
Collect(JSContext *cx, bool incremental, int64_t budget,
JSGCInvocationKind gckind, gcreason::Reason reason)
{
JSRuntime *rt = cx->runtime;
@ -3642,7 +3650,7 @@ Collect(JSContext *cx, bool full, int64_t budget,
return;
#endif
JS_ASSERT_IF(budget != SliceBudget::Unlimited, JSGC_INCREMENTAL);
JS_ASSERT_IF(!incremental || budget != SliceBudget::Unlimited, JSGC_INCREMENTAL);
#ifdef JS_GC_ZEAL
bool restartVerify = cx->runtime->gcVerifyData &&
@ -3667,14 +3675,22 @@ Collect(JSContext *cx, bool full, int64_t budget,
RecordNativeStackTopForGC(rt);
if (rt->gcMode == JSGC_MODE_GLOBAL)
full = true;
int compartmentCount = 0;
int collectedCount = 0;
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (rt->gcMode == JSGC_MODE_GLOBAL)
c->scheduleGC();
/* This is a heuristic to avoid resets. */
if (rt->gcIncrementalState != NO_INCREMENTAL && rt->gcIncrementalIsFull)
full = true;
/* This is a heuristic to avoid resets. */
if (rt->gcIncrementalState != NO_INCREMENTAL && c->needsBarrier())
c->scheduleGC();
gcstats::AutoGCSlice agc(rt->gcStats, full, reason);
compartmentCount++;
if (c->isGCScheduled())
collectedCount++;
}
gcstats::AutoGCSlice agc(rt->gcStats, collectedCount, compartmentCount, reason);
do {
/*
@ -3691,7 +3707,7 @@ Collect(JSContext *cx, bool full, int64_t budget,
/* Lock out other GC allocator and collector invocations. */
AutoLockGC lock(rt);
rt->gcPoke = false;
GCCycle(cx, full, budget, gckind);
GCCycle(cx, incremental, budget, gckind);
}
if (rt->gcIncrementalState == NO_INCREMENTAL) {
@ -3710,21 +3726,35 @@ Collect(JSContext *cx, bool full, int64_t budget,
namespace js {
void
GC(JSContext *cx, bool full, JSGCInvocationKind gckind, gcreason::Reason reason)
GC(JSContext *cx, JSGCInvocationKind gckind, gcreason::Reason reason)
{
Collect(cx, full, SliceBudget::Unlimited, gckind, reason);
Collect(cx, false, SliceBudget::Unlimited, gckind, reason);
}
void
GCSlice(JSContext *cx, bool full, JSGCInvocationKind gckind, gcreason::Reason reason)
GCSlice(JSContext *cx, JSGCInvocationKind gckind, gcreason::Reason reason)
{
Collect(cx, full, cx->runtime->gcSliceBudget, gckind, reason);
Collect(cx, true, cx->runtime->gcSliceBudget, gckind, reason);
}
void
GCDebugSlice(JSContext *cx, int64_t objCount)
GCDebugSlice(JSContext *cx, bool limit, int64_t objCount)
{
Collect(cx, NULL, SliceBudget::WorkBudget(objCount), GC_NORMAL, gcreason::API);
int64_t budget = limit ? SliceBudget::WorkBudget(objCount) : SliceBudget::Unlimited;
PrepareForDebugGC(cx->runtime);
Collect(cx, true, budget, GC_NORMAL, gcreason::API);
}
/* Schedule a full GC unless a compartment will already be collected. */
void
PrepareForDebugGC(JSRuntime *rt)
{
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (c->isGCScheduled())
return;
}
PrepareForFullGC(rt);
}
void
@ -3902,15 +3932,6 @@ NewCompartment(JSContext *cx, JSPrincipals *principals)
*/
{
AutoLockGC lock(rt);
/*
* If we're in the middle of an incremental GC, we cancel
* it. Otherwise we might fail the mark the newly created
* compartment fully.
*/
if (rt->gcIncrementalState == MARK)
rt->gcCompartmentCreated = true;
if (rt->compartments.append(compartment))
return compartment;
}
@ -3925,15 +3946,8 @@ void
RunDebugGC(JSContext *cx)
{
#ifdef JS_GC_ZEAL
JSRuntime *rt = cx->runtime;
/*
* If rt->gcDebugCompartmentGC is true, only GC the current
* compartment. But don't GC the atoms compartment.
*/
if (rt->gcDebugCompartmentGC)
PrepareCompartmentForGC(cx->compartment);
RunLastDitchGC(cx, gcreason::DEBUG_GC, !rt->gcDebugCompartmentGC);
PrepareForDebugGC(cx->runtime);
RunLastDitchGC(cx, gcreason::DEBUG_GC);
#endif
}
@ -4321,6 +4335,14 @@ EndVerifyBarriers(JSContext *cx)
if (!trc)
return;
/* We need to disable barriers before tracing, which may invoke barriers. */
for (CompartmentsIter c(rt); !c.done(); c.next()) {
/* Don't verify if a new compartment was created. */
if (!c->needsBarrier_)
return;
c->needsBarrier_ = false;
}
/*
* We need to bump gcNumber so that the methodjit knows that jitcode has
* been discarded.
@ -4328,10 +4350,6 @@ EndVerifyBarriers(JSContext *cx)
JS_ASSERT(trc->number == rt->gcNumber);
rt->gcNumber++;
/* We need to disable barriers before tracing, which may invoke barriers. */
for (CompartmentsIter c(rt); !c.done(); c.next())
c->needsBarrier_ = false;
for (CompartmentsIter c(rt); !c.done(); c.next())
c->discardJitCode(rt->defaultFreeOp());

Просмотреть файл

@ -1384,7 +1384,7 @@ extern void
ShrinkGCBuffers(JSRuntime *rt);
extern void
PrepareCompartmentForGC(JSCompartment *comp);
PrepareForFullGC(JSRuntime *rt);
/*
* Kinds of js_GC invocation.
@ -1397,15 +1397,17 @@ typedef enum JSGCInvocationKind {
GC_SHRINK = 1
} JSGCInvocationKind;
/* Pass NULL for |comp| to get a full GC. */
extern void
GC(JSContext *cx, bool full, JSGCInvocationKind gckind, js::gcreason::Reason reason);
GC(JSContext *cx, JSGCInvocationKind gckind, js::gcreason::Reason reason);
extern void
GCSlice(JSContext *cx, bool full, JSGCInvocationKind gckind, js::gcreason::Reason reason);
GCSlice(JSContext *cx, JSGCInvocationKind gckind, js::gcreason::Reason reason);
extern void
GCDebugSlice(JSContext *cx, int64_t objCount);
GCDebugSlice(JSContext *cx, bool limit, int64_t objCount);
extern void
PrepareForDebugGC(JSRuntime *rt);
} /* namespace js */
@ -1729,14 +1731,14 @@ struct SliceBudget {
counter = INTPTR_MAX;
}
void step() {
counter--;
void step(intptr_t amt = 1) {
counter -= amt;
}
bool checkOverBudget();
bool isOverBudget() {
if (counter > 0)
if (counter >= 0)
return false;
return checkOverBudget();
}

Просмотреть файл

@ -83,7 +83,6 @@ CheckMarkedThing(JSTracer *trc, T *thing)
DebugOnly<JSRuntime *> rt = trc->runtime;
JS_ASSERT_IF(rt->gcIsFull, IS_GC_MARKING_TRACER(trc));
JS_ASSERT_IF(thing->compartment()->requireGCTracer(), IS_GC_MARKING_TRACER(trc));
JS_ASSERT(thing->isAligned());

Просмотреть файл

@ -652,25 +652,6 @@ MapContextOptionNameToFlag(JSContext* cx, const char* name)
extern JSClass global_class;
#ifdef JS_GC_ZEAL
static void
ParseZealArg(JSContext *cx, const char *arg)
{
int zeal, freq = 1, compartment = 0;
const char *p = strchr(arg, ',');
zeal = atoi(arg);
if (p) {
freq = atoi(p + 1);
p = strchr(p + 1, ',');
if (p)
compartment = atoi(p + 1);
}
JS_SetGCZeal(cx, (uint8_t)zeal, freq, !!compartment);
}
#endif
static JSBool
Version(JSContext *cx, unsigned argc, jsval *vp)
{
@ -4674,11 +4655,6 @@ ProcessArgs(JSContext *cx, JSObject *obj, OptionParser *op)
JS_ToggleOptions(cx, JSOPTION_METHODJIT);
}
#ifdef JS_GC_ZEAL
if (const char *zeal = op->getStringOption('Z'))
ParseZealArg(cx, zeal);
#endif
if (op->getBoolOption('d')) {
JS_SetRuntimeDebugMode(JS_GetRuntime(cx), true);
JS_SetDebugMode(cx, true);

Просмотреть файл

@ -568,7 +568,7 @@ GCZeal(JSContext *cx, unsigned argc, jsval *vp)
if (!JS_ValueToECMAUint32(cx, argc ? JS_ARGV(cx, vp)[0] : JSVAL_VOID, &zeal))
return false;
JS_SetGCZeal(cx, uint8_t(zeal), JS_DEFAULT_ZEAL_FREQ, false);
JS_SetGCZeal(cx, uint8_t(zeal), JS_DEFAULT_ZEAL_FREQ);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}

Просмотреть файл

@ -3620,6 +3620,7 @@ nsXPCComponents_Utils::GetWeakReference(const JS::Value &object, JSContext *cx,
NS_IMETHODIMP
nsXPCComponents_Utils::ForceGC(JSContext *cx)
{
js::PrepareForFullGC(JS_GetRuntime(cx));
js::GCForReason(cx, js::gcreason::COMPONENT_UTILS);
return NS_OK;
}
@ -3628,6 +3629,7 @@ nsXPCComponents_Utils::ForceGC(JSContext *cx)
NS_IMETHODIMP
nsXPCComponents_Utils::ForceShrinkingGC(JSContext *cx)
{
js::PrepareForFullGC(JS_GetRuntime(cx));
js::ShrinkingGC(cx, js::gcreason::COMPONENT_UTILS);
return NS_OK;
}
@ -3655,6 +3657,7 @@ class PreciseGCRunnable : public nsRunnable
}
}
js::PrepareForFullGC(JS_GetRuntime(mCx));
if (mShrinking)
js::ShrinkingGC(mCx, js::gcreason::COMPONENT_UTILS);
else
@ -3929,7 +3932,7 @@ NS_IMETHODIMP
nsXPCComponents_Utils::SetGCZeal(PRInt32 aValue, JSContext* cx)
{
#ifdef JS_GC_ZEAL
JS_SetGCZeal(cx, PRUint8(aValue), JS_DEFAULT_ZEAL_FREQ, false);
JS_SetGCZeal(cx, PRUint8(aValue), JS_DEFAULT_ZEAL_FREQ);
#endif
return NS_OK;
}

Просмотреть файл

@ -414,6 +414,7 @@ nsXPConnect::Collect(PRUint32 reason, PRUint32 kind)
return;
JSContext *cx = ccx.GetJSContext();
JSRuntime *rt = GetRuntime()->GetJSRuntime();
// We want to scan the current thread for GC roots only if it was in a
// request prior to the Collect call to avoid false positives during the
@ -423,6 +424,7 @@ nsXPConnect::Collect(PRUint32 reason, PRUint32 kind)
js::AutoSkipConservativeScan ascs(cx);
MOZ_ASSERT(reason < js::gcreason::NUM_REASONS);
js::gcreason::Reason gcreason = (js::gcreason::Reason)reason;
js::PrepareForFullGC(rt);
if (kind == nsGCShrinking) {
js::ShrinkingGC(cx, gcreason);
} else if (kind == nsGCIncremental) {