Landing patch for bug 401687. Stop reference counting JS objects in the cycle collector. Patch by peterv@propagandism.org, r+sr=dbaron@mozilla.com,igor@mir2.org, a=dsicore@mozilla.com

jst%mozilla.org 2007-11-01 22:51:59 +00:00
Parent 3fec05fb5a
Commit 52ecd87ffb
17 changed files with 967 additions and 555 deletions

View file

@ -2400,7 +2400,14 @@ nsXULElement::RecompileScriptEventListeners()
}
NS_IMPL_CYCLE_COLLECTION_CLASS(nsXULPrototypeNode)
NS_IMPL_CYCLE_COLLECTION_UNLINK_NATIVE_0(nsXULPrototypeNode)
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_NATIVE(nsXULPrototypeNode)
if (tmp->mType == nsXULPrototypeNode::eType_Element) {
static_cast<nsXULPrototypeElement*>(tmp)->Unlink();
}
else if (tmp->mType == nsXULPrototypeNode::eType_Script) {
static_cast<nsXULPrototypeScript*>(tmp)->Unlink();
}
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_NATIVE_BEGIN(nsXULPrototypeNode)
if (tmp->mType == nsXULPrototypeNode::eType_Element) {
nsXULPrototypeElement *elem =

View file

@ -1082,8 +1082,26 @@ nsJSContext::~nsJSContext()
nsCycleCollector_DEBUG_wasFreed(static_cast<nsIScriptContext*>(this));
#endif
NS_PRECONDITION(!mTerminations, "Shouldn't have termination funcs by now");
// Cope with JS_NewContext failure in ctor (XXXbe move NewContext to Init?)
Unlink();
--sContextCount;
if (!sContextCount && sDidShutdown) {
// The last context is being deleted, and we're already in the
// process of shutting down, release the JS runtime service, and
// the security manager.
NS_IF_RELEASE(sRuntimeService);
NS_IF_RELEASE(sSecurityManager);
NS_IF_RELEASE(gCollation);
NS_IF_RELEASE(gDecoder);
}
}
void
nsJSContext::Unlink()
{
if (!mContext)
return;
@ -1110,49 +1128,19 @@ nsJSContext::~nsJSContext()
} else {
::JS_DestroyContext(mContext);
}
--sContextCount;
if (!sContextCount && sDidShutdown) {
// The last context is being deleted, and we're already in the
// process of shutting down, release the JS runtime service, and
// the security manager.
NS_IF_RELEASE(sRuntimeService);
NS_IF_RELEASE(sSecurityManager);
NS_IF_RELEASE(gCollation);
NS_IF_RELEASE(gDecoder);
}
}
struct ContextCallbackItem : public JSTracer
{
nsCycleCollectionTraversalCallback *cb;
};
void
NoteContextChild(JSTracer *trc, void *thing, uint32 kind)
{
if (kind == JSTRACE_OBJECT || kind == JSTRACE_NAMESPACE ||
kind == JSTRACE_QNAME || kind == JSTRACE_XML) {
ContextCallbackItem *item = static_cast<ContextCallbackItem*>(trc);
item->cb->NoteScriptChild(JAVASCRIPT, thing);
}
mContext = nsnull;
}
// QueryInterface implementation for nsJSContext
NS_IMPL_CYCLE_COLLECTION_CLASS(nsJSContext)
// XXX Should we call ClearScope here?
NS_IMPL_CYCLE_COLLECTION_UNLINK_0(nsJSContext)
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(nsJSContext)
NS_IMPL_CYCLE_COLLECTION_UNLINK_NSCOMPTR(mGlobalWrapperRef)
tmp->Unlink();
tmp->mIsInitialized = PR_FALSE;
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(nsJSContext)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_NSCOMPTR(mGlobalWrapperRef)
{
ContextCallbackItem trc;
trc.cb = &cb;
JS_TRACER_INIT(&trc, tmp->mContext, NoteContextChild);
js_TraceContext(&trc, tmp->mContext);
}
nsContentUtils::XPConnect()->NoteJSContext(tmp->mContext, cb);
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(nsJSContext)

View file

@ -212,6 +212,8 @@ protected:
JSObject **aRet);
private:
void Unlink();
JSContext *mContext;
PRUint32 mNumEvaluations;

View file

@ -93,6 +93,11 @@ nsJSEventListener::~nsJSEventListener()
NS_IMPL_CYCLE_COLLECTION_CLASS(nsJSEventListener)
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(nsJSEventListener)
NS_IMPL_CYCLE_COLLECTION_UNLINK_NSCOMPTR(mTarget)
if (tmp->mContext) {
tmp->mScopeObject = nsnull;
NS_DROP_JS_OBJECTS(tmp, nsJSEventListener);
NS_IMPL_CYCLE_COLLECTION_UNLINK_NSCOMPTR(mContext)
}
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(nsJSEventListener)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_NSCOMPTR(mTarget)

View file

@ -848,10 +848,12 @@ JS_BeginRequest(JSContext *cx)
/* Indicate that a request is running. */
rt->requestCount++;
cx->requestDepth = 1;
cx->outstandingRequests++;
JS_UNLOCK_GC(rt);
return;
}
cx->requestDepth++;
cx->outstandingRequests++;
}
JS_PUBLIC_API(void)
@ -863,11 +865,13 @@ JS_EndRequest(JSContext *cx)
CHECK_REQUEST(cx);
JS_ASSERT(cx->requestDepth > 0);
JS_ASSERT(cx->outstandingRequests > 0);
if (cx->requestDepth == 1) {
/* Lock before clearing to interlock with ClaimScope, in jslock.c. */
rt = cx->runtime;
JS_LOCK_GC(rt);
cx->requestDepth = 0;
cx->outstandingRequests--;
/* See whether cx has any single-threaded scopes to start sharing. */
todop = &rt->scopeSharingTodo;
@ -908,6 +912,7 @@ JS_EndRequest(JSContext *cx)
}
cx->requestDepth--;
cx->outstandingRequests--;
}
/* Yield to pending GC operations, regardless of request depth */

View file

@ -742,6 +742,8 @@ struct JSContext {
#ifdef JS_THREADSAFE
JSThread *thread;
jsrefcount requestDepth;
/* Same as requestDepth but ignoring JS_SuspendRequest/JS_ResumeRequest */
jsrefcount outstandingRequests;
JSScope *scopeToShare; /* weak reference, see jslock.c */
JSScope *lockedSealedScope; /* weak ref, for low-cost sealed
scope locking */

View file

@ -65,6 +65,7 @@
native JSID(jsid);
[ptr] native voidPtrPtr(void*);
[ptr] native nsScriptObjectTracerPtr(nsScriptObjectTracer);
[ref] native nsCCTraversalCallbackRef(nsCycleCollectionTraversalCallback);
/***************************************************************************/
@ -150,6 +151,7 @@ interface nsIXPCSecurityManager;
interface nsIPrincipal;
%{C++
class nsCycleCollectionTraversalCallback;
class nsScriptObjectTracer;
%}
@ -443,7 +445,7 @@ interface nsIXPCFunctionThisTranslator : nsISupports
{ 0xbd, 0xd6, 0x0, 0x0, 0x64, 0x65, 0x73, 0x74 } }
%}
[uuid(3eb7f5fc-1325-43af-aead-6033162e04af)]
[uuid(9f45711b-bf4f-4af6-a71d-d165f042986d)]
interface nsIXPConnect : nsISupports
{
%{ C++
@ -741,4 +743,12 @@ interface nsIXPConnect : nsISupports
* @param aHolder The object that hold the rooted JS objects.
*/
[noscript] void removeJSHolder(in voidPtr aHolder);
/**
* Note aJSContext as a child to the cycle collector.
* @param aJSContext The JSContext to note.
* @param aCb The cycle collection traversal callback.
*/
[noscript,notxpcom] void noteJSContext(in JSContextPtr aJSContext,
in nsCCTraversalCallbackRef aCb);
};
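For reference, the intended caller of this new hook appears earlier in this patch: the nsJSContext traverse implementation reports its owned JSContext through it. A condensed sketch of that call pattern (abbreviated from the nsJSContext.cpp diff above, not a new API):

NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(nsJSContext)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE_NSCOMPTR(mGlobalWrapperRef)
  // Note the owned JSContext as a native child; the cycle collector then
  // traverses it via the JSContextParticipant that XPConnect registers.
  nsContentUtils::XPConnect()->NoteJSContext(tmp->mContext, cb);
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END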

View file

@ -77,8 +77,8 @@ nsXPConnect::nsXPConnect()
mDefaultSecurityManager(nsnull),
mDefaultSecurityManagerFlags(0),
mShuttingDown(JS_FALSE),
mObjRefcounts(nsnull),
mCycleCollectionContext(nsnull)
mCycleCollectionContext(nsnull),
mCycleCollecting(PR_FALSE)
{
// Ignore the result. If the runtime service is not ready to rumble
// then we'll set this up later as needed.
@ -87,6 +87,9 @@ nsXPConnect::nsXPConnect()
CallGetService(XPC_CONTEXT_STACK_CONTRACTID, &mContextStack);
nsCycleCollector_registerRuntime(nsIProgrammingLanguage::JAVASCRIPT, this);
#ifdef DEBUG_CC
mJSRoots.ops = nsnull;
#endif
#ifdef XPC_TOOLS_SUPPORT
{
@ -115,120 +118,11 @@ nsXPConnect::nsXPConnect()
}
#ifndef XPCONNECT_STANDALONE
typedef nsBaseHashtable<nsVoidPtrHashKey, nsISupports*, nsISupports*> ScopeSet;
#endif
static const PLDHashTableOps RefCountOps =
{
PL_DHashAllocTable,
PL_DHashFreeTable,
PL_DHashVoidPtrKeyStub,
PL_DHashMatchEntryStub,
PL_DHashMoveEntryStub,
PL_DHashClearEntryStub,
PL_DHashFinalizeStub,
nsnull
};
struct JSObjectRefcounts
{
PLDHashTable mRefCounts;
#ifndef XPCONNECT_STANDALONE
ScopeSet mScopes;
#endif
PRBool mMarkEnded;
struct ObjRefCount : public PLDHashEntryStub
{
PRUint32 mCount;
};
JSObjectRefcounts() : mMarkEnded(PR_FALSE)
{
mRefCounts.ops = nsnull;
#ifndef XPCONNECT_STANDALONE
mScopes.Init();
#endif
}
~JSObjectRefcounts()
{
NS_ASSERTION(!mRefCounts.ops,
"Didn't call PL_DHashTableFinish on mRefCounts?");
}
void Finish()
{
if(mRefCounts.ops) {
PL_DHashTableFinish(&mRefCounts);
mRefCounts.ops = nsnull;
}
#ifndef XPCONNECT_STANDALONE
mScopes.Clear();
#endif
}
void MarkStart()
{
if(mRefCounts.ops)
PL_DHashTableFinish(&mRefCounts);
if(!PL_DHashTableInit(&mRefCounts, &RefCountOps, nsnull,
sizeof(ObjRefCount), 65536))
mRefCounts.ops = nsnull;
mMarkEnded = PR_FALSE;
}
void MarkEnd()
{
mMarkEnded = PR_TRUE;
}
void Ref(void *obj)
{
if(!mRefCounts.ops)
return;
ObjRefCount *entry =
(ObjRefCount *)PL_DHashTableOperate(&mRefCounts, obj, PL_DHASH_ADD);
if(entry)
{
entry->key = obj;
++entry->mCount;
}
}
PRUint32 Get(void *obj)
{
PRUint32 count;
if(mRefCounts.ops)
{
PLDHashEntryHdr *entry =
PL_DHashTableOperate(&mRefCounts, obj, PL_DHASH_LOOKUP);
count = PL_DHASH_ENTRY_IS_BUSY(entry) ?
((ObjRefCount *)entry)->mCount :
0;
}
else
{
count = 0;
}
return count;
}
};
nsXPConnect::~nsXPConnect()
{
NS_ASSERTION(!mCycleCollectionContext,
"Didn't call FinishCycleCollection?");
nsCycleCollector_forgetRuntime(nsIProgrammingLanguage::JAVASCRIPT);
if (mObjRefcounts)
{
delete mObjRefcounts;
mObjRefcounts = NULL;
}
JSContext *cx = nsnull;
if (mRuntime) {
@ -514,67 +408,194 @@ nsXPConnect::GetInfoForName(const char * name, nsIInterfaceInfo** info)
}
static JSGCCallback gOldJSGCCallback;
// Number of collections that have collected nodes.
static PRUint32 gCollections;
// Whether to run cycle collection during GC.
static PRBool gCollect;
JS_STATIC_DLL_CALLBACK(JSBool)
XPCCycleGCCallback(JSContext *cx, JSGCStatus status)
XPCCycleCollectGCCallback(JSContext *cx, JSGCStatus status)
{
// Chain to old GCCallback first, we want to get all the mark notifications
// before recording the end of the mark phase.
JSBool ok = gOldJSGCCallback ? gOldJSGCCallback(cx, status) : JS_TRUE;
// Record the end of a mark phase. If we get more mark notifications then
// the GC has restarted and we'll need to clear the refcounts first.
// Launch the cycle collector.
if(status == JSGC_MARK_END)
nsXPConnect::GetXPConnect()->GetJSObjectRefcounts()->MarkEnd();
return ok;
}
void XPCMarkNotification(void *thing, uint8 flags, void *closure)
{
// XXX This can't deal with JS atoms yet, but probably should.
uint8 ty = flags & GCF_TYPEMASK;
if(ty == GCX_FUNCTION)
return;
JSObjectRefcounts* jsr = static_cast<JSObjectRefcounts*>(closure);
// We're marking after a mark phase ended, so the GC restarted itself and
// we want to clear the refcounts first.
if(jsr->mMarkEnded)
jsr->MarkStart();
jsr->Ref(thing);
}
nsresult
nsXPConnect::BeginCycleCollection()
{
if (!mObjRefcounts)
mObjRefcounts = new JSObjectRefcounts;
mObjRefcounts->MarkStart();
NS_ASSERTION(!mCycleCollectionContext,
"Didn't call FinishCycleCollection?");
mCycleCollectionContext = new XPCCallContext(NATIVE_CALLER);
if(!mCycleCollectionContext || !mCycleCollectionContext->IsValid())
{
delete mCycleCollectionContext;
mCycleCollectionContext = nsnull;
return NS_ERROR_FAILURE;
// This is the hook between marking and sweeping in the JS GC. Do cycle
// collection.
if(gCollect && nsCycleCollector_doCollect())
++gCollections;
else
// If cycle collection didn't collect anything we should stop
// collecting until the next call to nsXPConnect::Collect, even if
// there are more (nested) JS_GC calls.
gCollect = PR_FALSE;
// Mark JS objects that are held by XPCOM objects that are in cycles
// that will not be collected.
nsXPConnect::GetRuntime()->
TraceXPConnectRoots(cx->runtime->gcMarkingTracer);
}
return gOldJSGCCallback ? gOldJSGCCallback(cx, status) : JS_TRUE;
}
PRUint32
nsXPConnect::Collect()
{
// We're dividing JS objects into 2 categories:
//
// 1. "real" roots, held by the JS engine itself or rooted through the root
// and lock JS APIs. Roots from this category are considered black in the
// cycle collector, any cycle they participate in is uncollectable.
//
// 2. roots held by C++ objects that participate in cycle collection,
// held by XPConnect (see XPCJSRuntime::TraceXPConnectRoots). Roots from
// this category are considered grey in the cycle collector, their final
// color depends on the objects that hold them. It is thus very important
// to always traverse the objects that hold these objects during cycle
// collection (see XPCJSRuntime::AddXPConnectRoots).
//
// Note that if a root is in both categories it is the fact that it is in
// category 1 that takes precedence, so it will be considered black.
//
//
// We split up garbage collection into 3 phases (1, 3 and 4) and do cycle
// collection between the first 2 phases of garbage collection:
//
// 1. marking of the roots in category 1 by having the JS GC do its marking
// 2. cycle collection
// 3. marking of the roots in category 2 by
// XPCJSRuntime::TraceXPConnectRoots
// 4. sweeping of unmarked JS objects
//
// During cycle collection, marked JS objects (and the objects they hold)
// will be colored black. White objects holding roots from category 2 will
// be forgotten by XPConnect (in the unlink callback of the white objects).
// During phase 3 we'll only mark black objects holding JS objects (white
// objects were forgotten) and white JS objects will be swept during
// phase 4.
// Because splitting up the JS GC itself is hard, we're going to use a GC
// callback to do phase 2 and 3 after phase 1 has ended (see
// XPCCycleCollectGCCallback).
//
// If DEBUG_CC is not defined the cycle collector will not traverse roots
// from category 1 or any JS objects held by them. Any JS objects they hold
// will already be marked by the JS GC and will thus be colored black
// themselves. Any C++ objects they hold will have a missing (untraversed)
// edge from the JS object to the C++ object and so it will be marked black
// too. This decreases the number of objects that the cycle collector has to
// deal with.
// To improve debugging, if DEBUG_CC is defined all JS objects are
// traversed.
XPCCallContext cycleCollectionContext(NATIVE_CALLER);
if(!cycleCollectionContext.IsValid())
{
return PR_FALSE;
}
mCycleCollecting = PR_TRUE;
mCycleCollectionContext = &cycleCollectionContext;
gCollections = 0;
gCollect = PR_TRUE;
JSContext *cx = mCycleCollectionContext->GetJSContext();
gOldJSGCCallback = JS_SetGCCallback(cx, XPCCycleGCCallback);
JS_SetGCThingCallback(cx, XPCMarkNotification, mObjRefcounts);
gOldJSGCCallback = JS_SetGCCallback(cx, XPCCycleCollectGCCallback);
GetRuntime()->UnsetContextGlobals();
JS_GC(cx);
JS_SetGCThingCallback(cx, nsnull, nsnull);
GetRuntime()->RestoreContextGlobals();
JS_SetGCCallback(cx, gOldJSGCCallback);
gOldJSGCCallback = nsnull;
XPCWrappedNativeScope::SuspectAllWrappers(mRuntime);
mCycleCollectionContext = nsnull;
mCycleCollecting = PR_FALSE;
return gCollections;
}
#ifdef DEBUG_CC
struct NoteJSRootTracer : public JSTracer
{
NoteJSRootTracer(PLDHashTable *aObjects,
nsCycleCollectionTraversalCallback& cb)
: mObjects(aObjects),
mCb(cb)
{
}
PLDHashTable* mObjects;
nsCycleCollectionTraversalCallback& mCb;
};
JS_STATIC_DLL_CALLBACK(void)
NoteJSRoot(JSTracer *trc, void *thing, uint32 kind)
{
if(kind == JSTRACE_OBJECT || kind == JSTRACE_NAMESPACE ||
kind == JSTRACE_QNAME || kind == JSTRACE_XML)
{
NoteJSRootTracer *tracer = static_cast<NoteJSRootTracer*>(trc);
PLDHashEntryHdr *entry = PL_DHashTableOperate(tracer->mObjects, thing,
PL_DHASH_ADD);
if(entry && !reinterpret_cast<PLDHashEntryStub*>(entry)->key)
{
reinterpret_cast<PLDHashEntryStub*>(entry)->key = thing;
tracer->mCb.NoteRoot(nsIProgrammingLanguage::JAVASCRIPT, thing,
nsXPConnect::GetXPConnect());
}
}
}
#endif
nsresult
nsXPConnect::BeginCycleCollection(nsCycleCollectionTraversalCallback &cb)
{
#ifdef DEBUG_CC
NS_ASSERTION(!mJSRoots.ops, "Didn't call FinishCollection?");
if(!mCycleCollectionContext)
{
// Being called from nsCycleCollector::ExplainLiveExpectedGarbage.
mExplainCycleCollectionContext = new XPCCallContext(NATIVE_CALLER);
if(!mExplainCycleCollectionContext ||
!mExplainCycleCollectionContext->IsValid())
{
mExplainCycleCollectionContext = nsnull;
return PR_FALSE;
}
mCycleCollectionContext = mExplainCycleCollectionContext;
// Record all objects held by the JS runtime. This avoids doing a
// complete GC if we're just tracing to explain (from
// ExplainLiveExpectedGarbage), which makes the results of cycle
// collection identical for DEBUG_CC and non-DEBUG_CC builds.
if(!PL_DHashTableInit(&mJSRoots, PL_DHashGetStubOps(), nsnull,
sizeof(PLDHashEntryStub), PL_DHASH_MIN_SIZE)) {
mJSRoots.ops = nsnull;
return NS_ERROR_OUT_OF_MEMORY;
}
PRBool alreadyCollecting = mCycleCollecting;
mCycleCollecting = PR_TRUE;
NoteJSRootTracer trc(&mJSRoots, cb);
JS_TRACER_INIT(&trc, mCycleCollectionContext->GetJSContext(),
NoteJSRoot);
JS_TraceRuntime(&trc);
mCycleCollecting = alreadyCollecting;
}
#else
NS_ASSERTION(mCycleCollectionContext,
"Didn't call nsXPConnect::Collect()?");
#endif
GetRuntime()->AddXPConnectRoots(mCycleCollectionContext->GetJSContext(),
cb);
#ifndef XPCONNECT_STANDALONE
NS_ASSERTION(mObjRefcounts->mScopes.Count() == 0, "Didn't clear mScopes?");
if(!mScopes.IsInitialized())
{
mScopes.Init();
}
NS_ASSERTION(mScopes.Count() == 0, "Didn't clear mScopes?");
XPCWrappedNativeScope::TraverseScopes(*mCycleCollectionContext);
#endif
@ -585,36 +606,45 @@ nsXPConnect::BeginCycleCollection()
void
nsXPConnect::RecordTraversal(void *p, nsISupports *s)
{
mObjRefcounts->mScopes.Put(p, s);
mScopes.Put(p, s);
}
#endif
nsresult
nsXPConnect::FinishCycleCollection()
{
delete mCycleCollectionContext;
mCycleCollectionContext = nsnull;
if (mObjRefcounts)
mObjRefcounts->Finish();
#ifdef DEBUG_CC
if(mExplainCycleCollectionContext)
{
mCycleCollectionContext = nsnull;
mExplainCycleCollectionContext = nsnull;
}
#endif
#ifndef XPCONNECT_STANDALONE
mScopes.Clear();
#endif
#ifdef DEBUG_CC
if(mJSRoots.ops)
{
PL_DHashTableFinish(&mJSRoots);
mJSRoots.ops = nsnull;
}
#endif
return NS_OK;
}
nsCycleCollectionParticipant *
nsXPConnect::ToParticipant(void *p)
{
// Put this assertion here so it fires when we still have a stack
// showing where the bad pointer came from.
NS_ASSERTION(mObjRefcounts->Get(p) > 0,
"JS object but unknown to the JS GC?");
return this;
}
NS_IMETHODIMP
nsXPConnect::Root(void *p)
{
if(!mCycleCollectionContext || !JS_LockGCThing(*mCycleCollectionContext, p))
return NS_ERROR_FAILURE;
return NS_OK;
}
@ -631,62 +661,36 @@ nsXPConnect::PrintAllReferencesTo(void *p)
0x7fffffff, nsnull);
#endif
}
JS_STATIC_DLL_CALLBACK(JSDHashOperator)
SuspectWrappedJS(JSDHashTable *table, JSDHashEntryHdr *hdr,
uint32 number, void *arg)
{
for (nsXPCWrappedJS* wrapper = ((JSObject2WrappedJSMap::Entry*)hdr)->value;
wrapper; wrapper = wrapper->GetNextWrapper())
if (wrapper->IsValid() && !wrapper->IsSubjectToFinalization())
nsCycleCollector_suspectCurrent(
NS_CYCLE_COLLECTION_CLASSNAME(nsXPCWrappedJS)::Upcast(wrapper));
return JS_DHASH_NEXT;
}
void
nsXPConnect::SuspectExtraPointers()
{
// FIXME: We should really just call suspectCurrent on all the roots
// in the runtime, or even all the objects in the runtime, except we
// can't call suspectCurrent on JS objects.
GetRuntime(this)->GetWrappedJSMap()->Enumerate(SuspectWrappedJS, nsnull);
}
#endif
NS_IMETHODIMP
nsXPConnect::Unlink(void *p)
{
if(!mCycleCollectionContext)
return NS_ERROR_FAILURE;
uint8 ty = *js_GetGCThingFlags(p) & GCF_TYPEMASK;
if(ty == GCX_OBJECT)
JS_ClearScope(*mCycleCollectionContext, static_cast<JSObject*>(p));
return NS_OK;
}
NS_IMETHODIMP
nsXPConnect::Unroot(void *p)
{
if(!mCycleCollectionContext ||
!JS_UnlockGCThing(*mCycleCollectionContext, p))
return NS_ERROR_FAILURE;
return NS_OK;
}
struct ContextCallbackItem : public JSTracer
struct TraversalTracer : public JSTracer
{
nsCycleCollectionTraversalCallback *cb;
TraversalTracer(nsCycleCollectionTraversalCallback &aCb) : cb(aCb)
{
}
nsCycleCollectionTraversalCallback &cb;
};
void
JS_STATIC_DLL_CALLBACK(void)
NoteJSChild(JSTracer *trc, void *thing, uint32 kind)
{
if(kind == JSTRACE_OBJECT || kind == JSTRACE_NAMESPACE ||
kind == JSTRACE_QNAME || kind == JSTRACE_XML)
{
ContextCallbackItem *item = static_cast<ContextCallbackItem*>(trc);
item->cb->NoteScriptChild(nsIProgrammingLanguage::JAVASCRIPT, thing);
TraversalTracer *tracer = static_cast<TraversalTracer*>(trc);
tracer->cb.NoteScriptChild(nsIProgrammingLanguage::JAVASCRIPT, thing);
}
}
@ -698,7 +702,7 @@ static uint8 GCTypeToTraceKindMap[GCX_NTYPES] = {
JSTRACE_NAMESPACE, /* GCX_NAMESPACE */
JSTRACE_QNAME, /* GCX_QNAME */
JSTRACE_XML, /* GCX_XML */
(uint8)-1, /* unused */
(uint8)-1, /* unused */
JSTRACE_STRING, /* GCX_EXTERNAL_STRING + 0 */
JSTRACE_STRING, /* GCX_EXTERNAL_STRING + 1 */
JSTRACE_STRING, /* GCX_EXTERNAL_STRING + 2 */
@ -725,10 +729,22 @@ nsXPConnect::Traverse(void *p, nsCycleCollectionTraversalCallback &cb)
JSContext *cx = mCycleCollectionContext->GetJSContext();
PRUint32 refcount = mObjRefcounts->Get(p);
NS_ASSERTION(refcount > 0, "JS object but unknown to the JS GC?");
uint8 ty = GetTraceKind(p);
CCNodeType type;
#ifdef DEBUG_CC
if(mJSRoots.ops)
{
PLDHashEntryHdr* entry =
PL_DHashTableOperate(&mJSRoots, p, PL_DHASH_LOOKUP);
type = PL_DHASH_ENTRY_IS_BUSY(entry) ? GCMarked : GCUnmarked;
}
else
{
type = JS_IsAboutToBeFinalized(cx, p) ? GCUnmarked : GCMarked;
}
if(ty == GCX_OBJECT)
{
JSObject *obj = static_cast<JSObject*>(p);
@ -807,10 +823,12 @@ nsXPConnect::Traverse(void *p, nsCycleCollectionTraversalCallback &cb)
else if(clazz == &js_FunctionClass)
{
JSFunction* fun = (JSFunction*) JS_GetPrivate(cx, obj);
if(fun->atom)
if(fun->atom && ATOM_IS_STRING(fun->atom))
{
NS_ConvertUTF16toUTF8
fname(JS_GetStringChars(ATOM_TO_STRING(fun->atom)));
JS_snprintf(name, sizeof(name), "JS Object (Function - %s)",
js_AtomToPrintableString(cx, fun->atom));
fname.get());
}
else
{
@ -823,23 +841,31 @@ nsXPConnect::Traverse(void *p, nsCycleCollectionTraversalCallback &cb)
}
}
cb.DescribeNode(refcount, sizeof(JSObject), name);
cb.DescribeNode(type, 0, sizeof(JSObject), name);
}
else
{
cb.DescribeNode(refcount, sizeof(JSObject), "JS Object");
cb.DescribeNode(type, 0, sizeof(JSObject), "JS Object");
}
#else
cb.DescribeNode(refcount);
type = JS_IsAboutToBeFinalized(cx, p) ? GCUnmarked : GCMarked;
cb.DescribeNode(type, 0);
#endif
uint8 ty = GetTraceKind(p);
if(ty != GCX_OBJECT && ty != GCX_NAMESPACE && ty != GCX_QNAME &&
ty != GCX_XML)
return NS_OK;
ContextCallbackItem trc;
trc.cb = &cb;
#ifndef DEBUG_CC
// There's no need to trace objects that have already been marked by the JS
// GC. Any JS objects hanging from them will already be marked. Only do this
// if DEBUG_CC is not defined, else we do want to know about all JS objects
// to get better graphs and explanations.
if(type == GCMarked)
return NS_OK;
#endif
TraversalTracer trc(cb);
JS_TRACER_INIT(&trc, cx, NoteJSChild);
JS_TraceChildren(&trc, p, GCTypeToTraceKindMap[ty]);
@ -874,7 +900,7 @@ nsXPConnect::Traverse(void *p, nsCycleCollectionTraversalCallback &cb)
if(clazz->flags & JSCLASS_IS_GLOBAL)
{
nsISupports *principal = nsnull;
mObjRefcounts->mScopes.Get(obj, &principal);
mScopes.Get(obj, &principal);
cb.NoteXPCOMChild(principal);
}
#endif
@ -882,6 +908,80 @@ nsXPConnect::Traverse(void *p, nsCycleCollectionTraversalCallback &cb)
return NS_OK;
}
PRInt32
nsXPConnect::GetRequestDepth(JSContext* cx)
{
PRInt32 requestDepth = cx->outstandingRequests;
XPCCallContext* context = GetCycleCollectionContext();
if(context && cx == context->GetJSContext())
// Ignore the request from the XPCCallContext we created for cycle
// collection.
--requestDepth;
return requestDepth;
}
class JSContextParticipant : public nsCycleCollectionParticipant
{
public:
NS_IMETHOD Root(void *n)
{
return NS_OK;
}
NS_IMETHOD Unlink(void *n)
{
// We must not unlink a JSContext because Root/Unroot don't ensure that
// the pointer is still valid.
return NS_OK;
}
NS_IMETHOD Unroot(void *n)
{
return NS_OK;
}
NS_IMETHODIMP Traverse(void *n, nsCycleCollectionTraversalCallback &cb)
{
JSContext *cx = static_cast<JSContext*>(n);
// Add cx->requestDepth to the refcount, if there are outstanding
// requests the context needs to be kept alive and adding unknown
// edges will ensure that any cycles this context is in won't be
// collected.
PRInt32 refCount = nsXPConnect::GetXPConnect()->GetRequestDepth(cx) + 1;
#ifdef DEBUG_CC
cb.DescribeNode(RefCounted, refCount, sizeof(JSContext),
"JSContext");
#else
cb.DescribeNode(RefCounted, refCount);
#endif
void* globalObject;
if(cx->globalObject)
globalObject = cx->globalObject;
else
globalObject = nsXPConnect::GetRuntime()->GetUnsetContextGlobal(cx);
cb.NoteScriptChild(nsIProgrammingLanguage::JAVASCRIPT, globalObject);
return NS_OK;
}
};
static JSContextParticipant JSContext_cycleCollectorGlobal;
// static
nsCycleCollectionParticipant*
nsXPConnect::JSContextParticipant()
{
return &JSContext_cycleCollectorGlobal;
}
NS_IMETHODIMP_(void)
nsXPConnect::NoteJSContext(JSContext *aJSContext,
nsCycleCollectionTraversalCallback &aCb)
{
aCb.NoteNativeChild(aJSContext, &JSContext_cycleCollectorGlobal);
}
/***************************************************************************/
/***************************************************************************/
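A condensed sketch of the control flow the nsXPConnect.cpp changes above establish (simplified from the patch; error handling, context-global juggling and DEBUG_CC paths omitted):

// Phases 2 and 3 run from the GC callback at JSGC_MARK_END; phase 1 is the
// JS GC's own marking and phase 4 its normal sweep.
JS_STATIC_DLL_CALLBACK(JSBool)
XPCCycleCollectGCCallback(JSContext *cx, JSGCStatus status)
{
    if(status == JSGC_MARK_END)
    {
        // Phase 2: run cycle collection between the JS GC's mark and sweep.
        if(gCollect && nsCycleCollector_doCollect())
            ++gCollections;
        else
            gCollect = PR_FALSE;   // nothing collected: stop for nested GCs
        // Phase 3: mark JS objects held by XPCOM objects that survived.
        nsXPConnect::GetRuntime()->
            TraceXPConnectRoots(cx->runtime->gcMarkingTracer);
    }
    return gOldJSGCCallback ? gOldJSGCCallback(cx, status) : JS_TRUE;
}

PRUint32
nsXPConnect::Collect()
{
    JSContext *cx = mCycleCollectionContext->GetJSContext();
    gCollections = 0;
    gCollect = PR_TRUE;
    gOldJSGCCallback = JS_SetGCCallback(cx, XPCCycleCollectGCCallback);
    JS_GC(cx);                     // phase 1 (mark), callback, phase 4 (sweep)
    JS_SetGCCallback(cx, gOldJSGCCallback);
    return gCollections;           // collections that actually freed nodes
}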

View file

@ -318,7 +318,12 @@ void XPCJSRuntime::TraceJS(JSTracer* trc, void* data)
for(XPCRootSetElem *e = self->mObjectHolderRoots; e ; e = e->GetNextRoot())
static_cast<XPCJSObjectHolder*>(e)->TraceJS(trc);
self->TraceXPConnectRoots(trc);
if(self->GetXPConnect()->ShouldTraceRoots())
{
// Only trace these if we're not cycle-collecting, the cycle collector
// will do that if we are.
self->TraceXPConnectRoots(trc);
}
}
PR_STATIC_CALLBACK(void)
@ -343,8 +348,32 @@ TraceJSHolder(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number,
return JS_DHASH_NEXT;
}
struct ClearedGlobalObject : public JSDHashEntryHdr
{
JSContext* mContext;
JSObject* mGlobalObject;
};
void XPCJSRuntime::TraceXPConnectRoots(JSTracer *trc)
{
if(mClearedGlobalObjects.ops)
{
JSContext *iter = nsnull, *acx;
while((acx = JS_ContextIterator(GetJSRuntime(), &iter)))
{
JSDHashEntryHdr* entry =
JS_DHashTableOperate(&mClearedGlobalObjects, acx,
JS_DHASH_LOOKUP);
if(JS_DHASH_ENTRY_IS_BUSY(entry))
{
ClearedGlobalObject* clearedGlobal =
reinterpret_cast<ClearedGlobalObject*>(entry);
JS_CALL_OBJECT_TRACER(trc, clearedGlobal->mGlobalObject,
"global object");
}
}
}
XPCWrappedNativeScope::TraceJS(trc, this);
for(XPCRootSetElem *e = mVariantRoots; e ; e = e->GetNextRoot())
@ -357,6 +386,126 @@ void XPCJSRuntime::TraceXPConnectRoots(JSTracer *trc)
JS_DHashTableEnumerate(&mJSHolders, TraceJSHolder, trc);
}
JS_STATIC_DLL_CALLBACK(JSDHashOperator)
NoteJSHolder(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number,
void *arg)
{
ObjectHolder* entry = reinterpret_cast<ObjectHolder*>(hdr);
nsCycleCollectionTraversalCallback* cb =
static_cast<nsCycleCollectionTraversalCallback*>(arg);
cb->NoteRoot(nsIProgrammingLanguage::CPLUSPLUS, entry->holder,
entry->tracer);
return JS_DHASH_NEXT;
}
void XPCJSRuntime::AddXPConnectRoots(JSContext* cx,
nsCycleCollectionTraversalCallback &cb)
{
// For all JS objects that are held by native objects but aren't held
// through rooting or locking, we need to add all the native objects that
// hold them so that the JS objects are colored correctly in the cycle
// collector. This includes JSContexts that don't have outstanding requests,
// because their global object wasn't marked by the JS GC. All other JS
// roots were marked by the JS GC and will be colored correctly in the cycle
// collector.
JSContext *iter = nsnull, *acx;
while((acx = JS_ContextIterator(GetJSRuntime(), &iter)))
{
#ifndef DEBUG_CC
// Only skip JSContexts with outstanding requests if DEBUG_CC is not
// defined, else we do want to know about all JSContexts to get better
// graphs and explanations.
if(nsXPConnect::GetXPConnect()->GetRequestDepth(acx) != 0)
continue;
#endif
cb.NoteRoot(nsIProgrammingLanguage::CPLUSPLUS, acx,
nsXPConnect::JSContextParticipant());
}
XPCWrappedNativeScope::SuspectAllWrappers(this, cx, cb);
for(XPCRootSetElem *e = mVariantRoots; e ; e = e->GetNextRoot())
cb.NoteXPCOMRoot(static_cast<XPCTraceableVariant*>(e));
for(XPCRootSetElem *e = mWrappedJSRoots; e ; e = e->GetNextRoot())
{
nsIXPConnectWrappedJS *wrappedJS = static_cast<nsXPCWrappedJS*>(e);
cb.NoteXPCOMRoot(wrappedJS);
}
if(mJSHolders.ops)
JS_DHashTableEnumerate(&mJSHolders, NoteJSHolder, &cb);
}
void XPCJSRuntime::UnsetContextGlobals()
{
if(!JS_DHashTableInit(&mClearedGlobalObjects, JS_DHashGetStubOps(), nsnull,
sizeof(ClearedGlobalObject), JS_DHASH_MIN_SIZE))
{
mClearedGlobalObjects.ops = nsnull;
return;
}
JSContext *iter = nsnull, *acx;
while((acx = JS_ContextIterator(GetJSRuntime(), &iter)))
{
if(nsXPConnect::GetXPConnect()->GetRequestDepth(acx) == 0)
{
JS_ClearNewbornRoots(acx);
JSDHashEntryHdr* entry =
JS_DHashTableOperate(&mClearedGlobalObjects, acx, JS_DHASH_ADD);
ClearedGlobalObject* clearedGlobal =
reinterpret_cast<ClearedGlobalObject*>(entry);
if(clearedGlobal)
{
clearedGlobal->mContext = acx;
clearedGlobal->mGlobalObject = acx->globalObject;
acx->globalObject = nsnull;
}
}
}
}
void XPCJSRuntime::RestoreContextGlobals()
{
if(!mClearedGlobalObjects.ops)
return;
JSContext *iter = nsnull, *acx;
while((acx = JS_ContextIterator(GetJSRuntime(), &iter)))
{
JSDHashEntryHdr* entry =
JS_DHashTableOperate(&mClearedGlobalObjects, acx, JS_DHASH_LOOKUP);
if(JS_DHASH_ENTRY_IS_BUSY(entry))
{
ClearedGlobalObject* clearedGlobal =
reinterpret_cast<ClearedGlobalObject*>(entry);
acx->globalObject = clearedGlobal->mGlobalObject;
}
}
JS_DHashTableFinish(&mClearedGlobalObjects);
mClearedGlobalObjects.ops = nsnull;
}
JSObject* XPCJSRuntime::GetUnsetContextGlobal(JSContext* cx)
{
if(!mClearedGlobalObjects.ops)
return nsnull;
JSDHashEntryHdr* entry =
JS_DHashTableOperate(&mClearedGlobalObjects, cx, JS_DHASH_LOOKUP);
ClearedGlobalObject* clearedGlobal =
reinterpret_cast<ClearedGlobalObject*>(entry);
return JS_DHASH_ENTRY_IS_BUSY(entry) ?
clearedGlobal->mGlobalObject :
nsnull;
}
// static
JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status)
{
@ -939,6 +1088,7 @@ XPCJSRuntime::XPCJSRuntime(nsXPConnect* aXPConnect,
if(!JS_DHashTableInit(&mJSHolders, JS_DHashGetStubOps(), nsnull,
sizeof(ObjectHolder), 512))
mJSHolders.ops = nsnull;
mClearedGlobalObjects.ops = nsnull;
// Install a JavaScript 'debugger' keyword handler in debug builds only
#ifdef DEBUG

View file

@ -109,6 +109,8 @@
#include "nsIProperty.h"
#include "nsSupportsArray.h"
#include "nsTArray.h"
#include "nsBaseHashtable.h"
#include "nsHashKeys.h"
#include "nsIXPCScriptNotify.h" // used to notify: ScriptEvaluated
@ -426,12 +428,11 @@ private:
const PRBool OBJ_IS_GLOBAL = PR_TRUE;
const PRBool OBJ_IS_NOT_GLOBAL = PR_FALSE;
struct JSObjectRefcounts;
class nsXPConnect : public nsIXPConnect,
public nsIThreadObserver,
public nsSupportsWeakReference,
public nsCycleCollectionLanguageRuntime,
public nsCycleCollectionJSRuntime,
public nsCycleCollectionParticipant
{
public:
@ -489,18 +490,34 @@ public:
nsCycleCollectionTraversalCallback &cb);
// nsCycleCollectionLanguageRuntime
virtual nsresult BeginCycleCollection();
virtual nsresult BeginCycleCollection(nsCycleCollectionTraversalCallback &cb);
virtual nsresult FinishCycleCollection();
virtual nsCycleCollectionParticipant *ToParticipant(void *p);
virtual PRUint32 Collect();
#ifdef DEBUG_CC
virtual void PrintAllReferencesTo(void *p);
virtual void SuspectExtraPointers();
#endif
JSObjectRefcounts* GetJSObjectRefcounts() {return mObjRefcounts;}
// We should not trace XPConnect JS roots when tracing the graph for the
// cycle collector. Those should be traced from the XPCOM objects that hold
// them when we know that they won't be collected by the cycle collector.
PRBool ShouldTraceRoots()
{
return !mCycleCollecting;
}
static uint8 GetTraceKind(void *thing);
XPCCallContext* GetCycleCollectionContext()
{
return mCycleCollectionContext;
}
PRInt32 GetRequestDepth(JSContext* cx);
// This returns the singleton nsCycleCollectionParticipant for JSContexts.
static nsCycleCollectionParticipant *JSContextParticipant();
#ifndef XPCONNECT_STANDALONE
void RecordTraversal(void *p, nsISupports *s);
#endif
@ -529,14 +546,22 @@ private:
nsIXPCSecurityManager* mDefaultSecurityManager;
PRUint16 mDefaultSecurityManagerFlags;
JSBool mShuttingDown;
JSObjectRefcounts* mObjRefcounts;
XPCCallContext* mCycleCollectionContext;
#ifdef DEBUG_CC
nsAutoPtr<XPCCallContext> mExplainCycleCollectionContext;
PLDHashTable mJSRoots;
#endif
PRBool mCycleCollecting;
#ifdef XPC_TOOLS_SUPPORT
nsCOMPtr<nsIXPCToolsProfiler> mProfiler;
nsCOMPtr<nsILocalFile> mProfilerOutputFile;
#endif
#ifndef XPCONNECT_STANDALONE
typedef nsBaseHashtable<nsVoidPtrHashKey, nsISupports*, nsISupports*> ScopeSet;
ScopeSet mScopes;
#endif
};
/***************************************************************************/
@ -692,6 +717,10 @@ public:
nsresult AddJSHolder(void* aHolder, nsScriptObjectTracer* aTracer);
nsresult RemoveJSHolder(void* aHolder);
void UnsetContextGlobals();
void RestoreContextGlobals();
JSObject* GetUnsetContextGlobal(JSContext* cx);
void DebugDump(PRInt16 depth);
void SystemIsBeingShutDown(JSContext* cx);
@ -756,6 +785,7 @@ private:
XPCRootSetElem *mWrappedJSRoots;
XPCRootSetElem *mObjectHolderRoots;
JSDHashTable mJSHolders;
JSDHashTable mClearedGlobalObjects;
};
/***************************************************************************/
@ -1162,7 +1192,8 @@ public:
TraceJS(JSTracer* trc, XPCJSRuntime* rt);
static void
SuspectAllWrappers(XPCJSRuntime* rt);
SuspectAllWrappers(XPCJSRuntime* rt, JSContext* cx,
nsCycleCollectionTraversalCallback &cb);
static void
FinishedMarkPhaseOfGC(JSContext* cx, XPCJSRuntime* rt);
@ -2156,6 +2187,8 @@ public:
JSObject* GetWrapper() { return mWrapper; }
void SetWrapper(JSObject *obj) { mWrapper = obj; }
void NoteTearoffs(nsCycleCollectionTraversalCallback& cb);
// Make ctor and dtor protected (rather than private) to placate nsCOMPtr.
protected:
XPCWrappedNative(); // not implemented
@ -2407,6 +2440,8 @@ protected:
nsXPCWrappedJS* root,
nsISupports* aOuter);
void Unlink();
private:
JSObject* mJSObj;
nsXPCWrappedJSClass* mClass;

View file

@ -97,11 +97,16 @@ NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(XPCVariant)
nsVariant::Traverse(tmp->mData, cb);
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
// NB: We might unlink our outgoing references in the future; for now we do
// nothing. This is a harmless conservative behavior; it just means that we rely
// on the cycle being broken by some of the external XPCOM objects' unlink()
// methods, not our own. Typically *any* unlinking will break the cycle.
NS_IMPL_CYCLE_COLLECTION_UNLINK_0(XPCVariant)
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(XPCVariant)
if(!JSVAL_IS_STRING(tmp->mJSVal))
nsVariant::Cleanup(&tmp->mData);
if(JSVAL_IS_TRACEABLE(tmp->mJSVal))
{
XPCTraceableVariant *v = static_cast<XPCTraceableVariant*>(tmp);
v->RemoveFromRootSet(nsXPConnect::GetRuntime()->GetJSRuntime());
}
tmp->mJSVal = JSVAL_NULL;
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
// static
XPCVariant* XPCVariant::newVariant(XPCCallContext& ccx, jsval aJSVal)

View file

@ -86,9 +86,9 @@ NS_CYCLE_COLLECTION_CLASSNAME(nsXPCWrappedJS)::Traverse
char name[72];
JS_snprintf(name, sizeof(name), "nsXPCWrappedJS (%s)",
tmp->GetClass()->GetInterfaceName());
cb.DescribeNode(refcnt, sizeof(nsXPCWrappedJS), name);
cb.DescribeNode(RefCounted, refcnt, sizeof(nsXPCWrappedJS), name);
#else
cb.DescribeNode(refcnt);
cb.DescribeNode(RefCounted, refcnt);
#endif
// nsXPCWrappedJS keeps its own refcount artificially at or above 1, see the
@ -112,16 +112,19 @@ NS_CYCLE_COLLECTION_CLASSNAME(nsXPCWrappedJS)::Traverse
return NS_OK;
}
NS_IMETHODIMP
NS_CYCLE_COLLECTION_CLASSNAME(nsXPCWrappedJS)::Unlink(void *p)
{
// NB: We might unlink our outgoing references in the future; for
// now we do nothing. This is a harmless conservative behavior; it
// just means that we rely on the cycle being broken by some of
// the external XPCOM objects' unlink() methods, not our
// own. Typically *any* unlinking will break the cycle.
return NS_OK;
}
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(nsXPCWrappedJS)
if(tmp->mRoot && !tmp->mRoot->HasWeakReferences())
{
tmp->Unlink();
if(tmp->IsValid())
{
XPCJSRuntime* rt = nsXPConnect::GetRuntime();
if(tmp->mRefCnt > 1)
tmp->RemoveFromRootSet(rt->GetJSRuntime());
tmp->mJSObj = nsnull;
}
}
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
NS_IMETHODIMP
nsXPCWrappedJS::AggregatedQueryInterface(REFNSIID aIID, void** aInstancePtr)
@ -211,7 +214,6 @@ nsXPCWrappedJS::QueryInterface(REFNSIID aIID, void** aInstancePtr)
nsrefcnt
nsXPCWrappedJS::AddRef(void)
{
NS_PRECONDITION(mRoot, "bad root");
nsrefcnt cnt = (nsrefcnt) PR_AtomicIncrement((PRInt32*)&mRefCnt);
NS_LOG_ADDREF(this, cnt, "nsXPCWrappedJS", sizeof(*this));
@ -227,13 +229,8 @@ nsXPCWrappedJS::AddRef(void)
nsrefcnt
nsXPCWrappedJS::Release(void)
{
NS_PRECONDITION(mRoot, "bad root");
NS_PRECONDITION(0 != mRefCnt, "dup release");
#ifdef DEBUG_jband
NS_ASSERTION(IsValid(), "post xpconnect shutdown call of nsXPCWrappedJS::Release");
#endif
do_decrement:
nsrefcnt cnt = (nsrefcnt) PR_AtomicDecrement((PRInt32*)&mRefCnt);
@ -452,8 +449,18 @@ nsXPCWrappedJS::~nsXPCWrappedJS()
{
NS_PRECONDITION(0 == mRefCnt, "refcounting error");
XPCJSRuntime* rt = nsXPConnect::GetRuntime();
if(mRoot == this)
{
// Let the nsWeakReference object (if present) know of our demise.
ClearWeakReferences();
}
Unlink();
}
void
nsXPCWrappedJS::Unlink()
{
XPCJSRuntime* rt = nsXPConnect::GetRuntime();
if(mRoot != this)
{
// unlink this wrapper
@ -473,11 +480,6 @@ nsXPCWrappedJS::~nsXPCWrappedJS()
}
else
{
NS_ASSERTION(!mNext, "root wrapper with non-empty chain being deleted");
// Let the nsWeakReference object (if present) know of our demise.
ClearWeakReferences();
// remove this root wrapper from the map
if(rt)
{

View file

@ -67,9 +67,10 @@ NS_CYCLE_COLLECTION_CLASSNAME(XPCWrappedNative)::Traverse(void *p,
else
JS_snprintf(name, sizeof(name), "XPCWrappedNative");
cb.DescribeNode(tmp->mRefCnt.get(), sizeof(XPCWrappedNative), name);
cb.DescribeNode(RefCounted, tmp->mRefCnt.get(), sizeof(XPCWrappedNative),
name);
#else
cb.DescribeNode(tmp->mRefCnt.get());
cb.DescribeNode(RefCounted, tmp->mRefCnt.get());
#endif
if (tmp->mRefCnt.get() > 1) {
@ -93,20 +94,39 @@ NS_CYCLE_COLLECTION_CLASSNAME(XPCWrappedNative)::Traverse(void *p,
// XPCWrappedNative keeps its native object alive.
cb.NoteXPCOMChild(tmp->GetIdentityObject());
tmp->NoteTearoffs(cb);
return NS_OK;
}
NS_IMETHODIMP
NS_CYCLE_COLLECTION_CLASSNAME(XPCWrappedNative)::Unlink(void *p)
void
XPCWrappedNative::NoteTearoffs(nsCycleCollectionTraversalCallback& cb)
{
// NB: We might unlink our outgoing references in the future; for
// now we do nothing. This is a harmless conservative behavior; it
// just means that we rely on the cycle being broken by some of
// the external XPCOM objects' unlink() methods, not our
// own. Typically *any* unlinking will break the cycle.
return NS_OK;
// Tearoffs hold their native object alive. If their JS object hasn't been
// finalized yet we'll note the edge between the JS object and the native
// (see nsXPConnect::Traverse), but if their JS object has been finalized
// then the tearoff is only reachable through the XPCWrappedNative, so we
// record an edge here.
XPCWrappedNativeTearOffChunk* chunk;
for(chunk = &mFirstChunk; chunk; chunk = chunk->mNextChunk)
{
XPCWrappedNativeTearOff* to = chunk->mTearOffs;
for(int i = XPC_WRAPPED_NATIVE_TEAROFFS_PER_CHUNK-1; i >= 0; i--, to++)
{
JSObject* jso = to->GetJSObject();
if(!jso)
{
cb.NoteXPCOMChild(to->GetNative());
}
}
}
}
// No need to unlink the JS objects, if the XPCWrappedNative will be cycle
// collected then its mFlatJSObject will be cycle collected too and finalization
// of the mFlatJSObject will unlink the js objects (see
// XPC_WN_NoHelper_Finalize and FlatJSObjectFinalized).
NS_IMPL_CYCLE_COLLECTION_UNLINK_0(XPCWrappedNative)
#ifdef XPC_CHECK_CLASSINFO_CLAIMS
static void DEBUG_CheckClassInfoClaims(XPCWrappedNative* wrapper);

View file

@ -309,17 +309,33 @@ XPCWrappedNativeScope::TraceJS(JSTracer* trc, XPCJSRuntime* rt)
}
}
struct SuspectClosure
{
JSContext* cx;
nsCycleCollectionTraversalCallback& cb;
};
JS_STATIC_DLL_CALLBACK(JSDHashOperator)
WrappedNativeSuspecter(JSDHashTable *table, JSDHashEntryHdr *hdr,
uint32 number, void *arg)
{
SuspectClosure* closure = static_cast<SuspectClosure*>(arg);
XPCWrappedNative* wrapper = ((Native2WrappedNativeMap::Entry*)hdr)->value;
XPCWrappedNativeProto* proto = wrapper->GetProto();
if(proto && proto->ClassIsMainThreadOnly())
if(proto && proto->ClassIsMainThreadOnly() && wrapper->IsValid())
{
NS_ASSERTION(NS_IsMainThread(),
"Suspecting wrapped natives from non-main thread");
nsCycleCollector_suspectCurrent(wrapper);
#ifndef DEBUG_CC
// Only record objects that might be part of a cycle as roots.
if(!JS_IsAboutToBeFinalized(closure->cx, wrapper->GetFlatJSObject()))
return JS_DHASH_NEXT;
#endif
closure->cb.NoteRoot(nsIProgrammingLanguage::JAVASCRIPT,
wrapper->GetFlatJSObject(),
nsXPConnect::GetXPConnect());
}
return JS_DHASH_NEXT;
@ -327,14 +343,15 @@ WrappedNativeSuspecter(JSDHashTable *table, JSDHashEntryHdr *hdr,
// static
void
XPCWrappedNativeScope::SuspectAllWrappers(XPCJSRuntime* rt)
XPCWrappedNativeScope::SuspectAllWrappers(XPCJSRuntime* rt, JSContext* cx,
nsCycleCollectionTraversalCallback& cb)
{
XPCAutoLock lock(rt->GetMapLock());
// Do nsCycleCollector_suspectCurrent for all wrapped natives.
SuspectClosure closure = { cx, cb };
for(XPCWrappedNativeScope* cur = gScopes; cur; cur = cur->mNext)
{
cur->mWrappedNativeMap->Enumerate(WrappedNativeSuspecter, nsnull);
cur->mWrappedNativeMap->Enumerate(WrappedNativeSuspecter, &closure);
}
}

View file

@ -436,8 +436,7 @@ struct PtrInfo
nsCycleCollectionParticipant *mParticipant;
PRUint32 mColor : 2;
PRUint32 mInternalRefs : 30;
PRUint32 mRefCount : 31;
PRUint32 mWasPurple : 1;
PRUint32 mRefCount;
EdgePool::Iterator mFirstChild; // first
EdgePool::Iterator mLastChild; // one after last
@ -464,7 +463,6 @@ struct PtrInfo
mColor(grey),
mInternalRefs(0),
mRefCount(0),
mWasPurple(PR_FALSE),
mFirstChild(),
mLastChild()
#ifdef DEBUG_CC
@ -600,6 +598,7 @@ private:
struct GCGraph;
class GCGraphBuilder;
static GCGraph *sCurrGraph = nsnull;
@ -626,8 +625,10 @@ typedef nsTHashtable<nsVoidPtrHashKey> PointerSet;
typedef nsBaseHashtable<nsVoidPtrHashKey, PRUint32, PRUint32>
PointerSetWithGeneration;
#ifdef DEBUG_CC
static void
WriteGraph(FILE *stream, GCGraph &graph, const void *redPtr);
#endif
struct nsPurpleBuffer
{
@ -817,7 +818,7 @@ nsPurpleBuffer::SelectAgedPointers(nsDeque *transferBuffer)
struct nsCycleCollectionXPCOMRuntime :
public nsCycleCollectionLanguageRuntime
{
nsresult BeginCycleCollection()
nsresult BeginCycleCollection(nsCycleCollectionTraversalCallback &cb)
{
return NS_OK;
}
@ -831,7 +832,6 @@ struct nsCycleCollectionXPCOMRuntime :
#ifdef DEBUG_CC
virtual void PrintAllReferencesTo(void *p) {}
virtual void SuspectExtraPointers() {}
#endif
};
@ -839,6 +839,7 @@ struct nsCycleCollector
{
PRBool mCollectionInProgress;
PRBool mScanInProgress;
PRBool mFollowupCollection;
nsCycleCollectionLanguageRuntime *mRuntimes[nsIProgrammingLanguage::MAX+1];
nsCycleCollectionXPCOMRuntime mXPCOMRuntime;
@ -858,18 +859,17 @@ struct nsCycleCollector
void ForgetRuntime(PRUint32 langID);
void SelectPurple();
void MarkRoots(GCGraph &graph);
void MarkRoots(GCGraph &graph, GCGraphBuilder &builder);
void ScanRoots(GCGraph &graph);
PRBool CollectWhite(GCGraph &graph); // returns whether anything collected
nsCycleCollector();
~nsCycleCollector();
PRBool Suspect(nsISupports *n, PRBool current = PR_FALSE);
PRBool Suspect(nsISupports *n);
PRBool Forget(nsISupports *n);
void Allocated(void *n, size_t sz);
void Freed(void *n);
PRBool Collect(PRUint32 aTryCollections = 1);
PRBool DoCollect();
void Shutdown();
#ifdef DEBUG_CC
@ -878,6 +878,8 @@ struct nsCycleCollector
FILE *mPtrLog;
void MaybeDrawGraphs(GCGraph &graph);
void Allocated(void *n, size_t sz);
void Freed(void *n);
void ExplainLiveExpectedGarbage();
PRBool CreateReversedEdges(GCGraph &graph);
@ -1103,7 +1105,7 @@ static PLDHashTableOps PtrNodeOps = {
nsnull
};
class GCGraphBuilder : private nsCycleCollectionTraversalCallback
class GCGraphBuilder : public nsCycleCollectionTraversalCallback
{
private:
NodePool::Builder mNodeBuilder;
@ -1135,10 +1137,14 @@ public:
private:
// nsCycleCollectionTraversalCallback methods.
#ifdef DEBUG_CC
NS_IMETHOD_(void) DescribeNode(nsrefcnt refCount, size_t objSz, const char *objName);
NS_IMETHOD_(void) DescribeNode(CCNodeType type, nsrefcnt refCount,
size_t objSz, const char *objName);
#else
NS_IMETHOD_(void) DescribeNode(nsrefcnt refCount);
NS_IMETHOD_(void) DescribeNode(CCNodeType type, nsrefcnt refCount);
#endif
NS_IMETHOD_(void) NoteXPCOMRoot(nsISupports *root);
NS_IMETHOD_(void) NoteRoot(PRUint32 langID, void *child,
nsCycleCollectionParticipant* participant);
NS_IMETHOD_(void) NoteXPCOMChild(nsISupports *child);
NS_IMETHOD_(void) NoteNativeChild(void *child,
nsCycleCollectionParticipant *participant);
@ -1210,10 +1216,44 @@ GCGraphBuilder::Traverse(PtrInfo* aPtrInfo)
}
NS_IMETHODIMP_(void)
GCGraphBuilder::NoteXPCOMRoot(nsISupports *root)
{
root = canonicalize(root);
NS_ASSERTION(root,
"Don't add objects that don't participate in collection!");
#ifdef DEBUG_CC
GCGraphBuilder::DescribeNode(nsrefcnt refCount, size_t objSz, const char *objName)
if (nsCycleCollector_shouldSuppress(root))
return;
#endif
nsXPCOMCycleCollectionParticipant *cp;
ToParticipant(root, &cp);
NoteRoot(nsIProgrammingLanguage::CPLUSPLUS, root, cp);
}
NS_IMETHODIMP_(void)
GCGraphBuilder::NoteRoot(PRUint32 langID, void *root,
nsCycleCollectionParticipant* participant)
{
NS_ASSERTION(root, "Don't add a null root!");
if (langID > nsIProgrammingLanguage::MAX || !mRuntimes[langID]) {
Fault("adding root for unregistered language", root);
return;
}
AddNode(root, participant, langID);
}
NS_IMETHODIMP_(void)
#ifdef DEBUG_CC
GCGraphBuilder::DescribeNode(CCNodeType type, nsrefcnt refCount,
size_t objSz, const char *objName)
#else
GCGraphBuilder::DescribeNode(nsrefcnt refCount)
GCGraphBuilder::DescribeNode(CCNodeType type, nsrefcnt refCount)
#endif
{
#ifdef DEBUG_CC
@ -1221,10 +1261,15 @@ GCGraphBuilder::DescribeNode(nsrefcnt refCount)
mCurrPi->mName = PL_strdup(objName);
#endif
if (refCount == 0)
Fault("zero refcount", mCurrPi);
if (type == RefCounted) {
if (refCount == 0 || refCount == PR_UINT32_MAX)
Fault("zero or overflowing refcount", mCurrPi);
mCurrPi->mRefCount = refCount;
mCurrPi->mRefCount = refCount;
}
else {
mCurrPi->mRefCount = type == GCMarked ? PR_UINT32_MAX : 0;
}
#ifdef DEBUG_CC
sCollector->mStats.mVisitedNode++;
#endif
@ -1299,13 +1344,8 @@ nsCycleCollector::SelectPurple()
}
void
nsCycleCollector::MarkRoots(GCGraph &graph)
nsCycleCollector::MarkRoots(GCGraph &graph, GCGraphBuilder &builder)
{
if (mBuf.GetSize() == 0)
return;
GCGraphBuilder builder(graph, mRuntimes);
int i;
for (i = 0; i < mBuf.GetSize(); ++i) {
nsISupports *s = static_cast<nsISupports *>(mBuf.ObjectAt(i));
@ -1314,8 +1354,26 @@ nsCycleCollector::MarkRoots(GCGraph &graph)
if (cp) {
PtrInfo *pinfo = builder.AddNode(canonicalize(s), cp,
nsIProgrammingLanguage::CPLUSPLUS);
if (pinfo)
pinfo->mWasPurple = PR_TRUE;
if (pinfo) {
cp->UnmarkPurple(s);
#ifdef DEBUG_CC
mStats.mForgetNode++;
#ifndef __MINGW32__
if (mParams.mHookMalloc)
InitMemHook();
#endif
if (mParams.mLogPointers) {
if (!mPtrLog)
mPtrLog = fopen("pointer_log", "w");
fprintf(mPtrLog, "F %p\n", static_cast<void*>(s));
}
#endif
mPurpleBuf.Remove(s);
}
}
}
@ -1343,7 +1401,7 @@ struct ScanBlackWalker : public GraphWalker
}
void VisitNode(PtrInfo *pi)
{
{
pi->mColor = black;
#ifdef DEBUG_CC
sCollector->mStats.mSetColorBlack++;
@ -1361,13 +1419,10 @@ struct scanWalker : public GraphWalker
void VisitNode(PtrInfo *pi)
{
if (pi->mColor != grey)
Fault("scanning non-grey node", pi);
if (pi->mInternalRefs > pi->mRefCount)
if (pi->mInternalRefs > pi->mRefCount && pi->mRefCount > 0)
Fault("traversed refs exceed refcount", pi);
if (pi->mInternalRefs == pi->mRefCount) {
if (pi->mInternalRefs == pi->mRefCount || pi->mRefCount == 0) {
pi->mColor = white;
#ifdef DEBUG_CC
sCollector->mStats.mSetColorWhite++;
@ -1437,31 +1492,8 @@ nsCycleCollector::CollectWhite(GCGraph &graph)
while (!etor.IsDone())
{
PtrInfo *pinfo = etor.GetNext();
void *p = pinfo->mPointer;
if (pinfo->mColor == white) {
mBuf.Push(pinfo);
if (pinfo->mWasPurple) {
nsISupports* s = static_cast<nsISupports*>(p);
PRBool forgetResult = Forget(s);
NS_ASSERTION(forgetResult, "Forget failed");
}
}
else if (pinfo->mWasPurple) {
nsISupports* s = static_cast<nsISupports*>(p);
nsXPCOMCycleCollectionParticipant* cp =
static_cast<nsXPCOMCycleCollectionParticipant*>
(pinfo->mParticipant);
#ifdef DEBUG
nsXPCOMCycleCollectionParticipant* checkcp;
CallQueryInterface(s, &checkcp);
NS_ASSERTION(checkcp == cp,
"QI should return the same participant!");
#endif
cp->UnmarkPurple(s);
PRBool forgetResult = Forget(s);
NS_ASSERTION(forgetResult, "Forget failed");
}
}
@ -1778,12 +1810,17 @@ WriteGraph(FILE *stream, GCGraph &graph, const void *redPtr)
PtrInfo *pi = etor.GetNext();
const void *p = pi->mPointer;
fprintf(stream,
"n%p [label=\"%s\\n%p\\n%u/%u refs found\", "
"fillcolor=%s, fontcolor=%s]\n",
"n%p [label=\"%s\\n%p\\n",
p,
pi->mName,
p,
pi->mInternalRefs, pi->mRefCount,
p);
if (pi->mRefCount != 0 && pi->mRefCount != PR_UINT32_MAX) {
fprintf(stream,
"%u/%u refs found",
pi->mInternalRefs, pi->mRefCount);
}
fprintf(stream,
"\", fillcolor=%s, fontcolor=%s]\n",
(redPtr && redPtr == p ? "red" : (pi->mColor == black ? "black" : "white")),
(pi->mColor == black ? "white" : "black"));
for (EdgePool::Iterator child = pi->mFirstChild,
@ -1876,11 +1913,15 @@ public:
return mSuppressThisNode;
}
NS_IMETHOD_(void) DescribeNode(nsrefcnt refCount, size_t objSz, const char *objName)
NS_IMETHOD_(void) DescribeNode(CCNodeType type, nsrefcnt refCount,
size_t objSz, const char *objName)
{
mSuppressThisNode = (PL_strstr(sSuppressionList, objName) != nsnull);
}
NS_IMETHOD_(void) NoteXPCOMRoot(nsISupports *root) {};
NS_IMETHOD_(void) NoteRoot(PRUint32 langID, void *root,
nsCycleCollectionParticipant* participant) {};
NS_IMETHOD_(void) NoteXPCOMChild(nsISupports *child) {}
NS_IMETHOD_(void) NoteScriptChild(PRUint32 langID, void *child) {}
NS_IMETHOD_(void) NoteNativeChild(void *child,
@ -1913,7 +1954,7 @@ nsCycleCollector_isScanSafe(nsISupports *s)
#endif
PRBool
nsCycleCollector::Suspect(nsISupports *n, PRBool current)
nsCycleCollector::Suspect(nsISupports *n)
{
// Re-entering ::Suspect during collection used to be a fault, but
// we are canonicalizing nsISupports pointers using QI, so we will
@ -1947,10 +1988,7 @@ nsCycleCollector::Suspect(nsISupports *n, PRBool current)
}
#endif
if (current)
mBuf.Push(n);
else
mPurpleBuf.Put(n);
mPurpleBuf.Put(n);
return PR_TRUE;
}
@ -2024,7 +2062,6 @@ nsCycleCollector::Freed(void *n)
PRBool
nsCycleCollector::Collect(PRUint32 aTryCollections)
{
PRBool didCollect = PR_FALSE;
#if defined(DEBUG_CC) && !defined(__MINGW32__)
if (!mParams.mDoNothing && mParams.mHookMalloc)
InitMemHook();
@ -2032,11 +2069,11 @@ nsCycleCollector::Collect(PRUint32 aTryCollections)
// This can legitimately happen in a few cases. See bug 383651.
if (mCollectionInProgress)
return didCollect;
return PR_FALSE;
#ifdef COLLECT_TIME_DEBUG
printf("cc: Starting nsCycleCollector::Collect(%d)\n", aTryCollections);
PRTime start = PR_Now(), now;
PRTime start = PR_Now();
#endif
mCollectionInProgress = PR_TRUE;
@ -2047,146 +2084,23 @@ nsCycleCollector::Collect(PRUint32 aTryCollections)
obs->NotifyObservers(nsnull, "cycle-collector-begin", nsnull);
}
#ifdef DEBUG_CC
PRUint32 origTryCollections = aTryCollections;
#endif
mFollowupCollection = PR_FALSE;
while (aTryCollections > 0) {
// This triggers a JS GC. Our caller assumes we always trigger at
// least one JS GC -- they rely on this fact to avoid redundant JS
// GC calls -- so it's essential that we actually execute this
// step!
//
// It is also essential to empty mBuf here because starting up
// collection in language runtimes may force some "current" suspects
// into mBuf.
mBuf.Empty();
#ifdef COLLECT_TIME_DEBUG
now = PR_Now();
#endif
for (PRUint32 i = 0; i <= nsIProgrammingLanguage::MAX; ++i) {
if (mRuntimes[i])
mRuntimes[i]->BeginCycleCollection();
PRUint32 totalCollections = 0;
while (aTryCollections > totalCollections) {
PRUint32 collections = 0;
if (mRuntimes[nsIProgrammingLanguage::JAVASCRIPT]) {
collections = static_cast<nsCycleCollectionJSRuntime*>
(mRuntimes[nsIProgrammingLanguage::JAVASCRIPT])->Collect();
}
else {
collections = DoCollect() ? 1 : 0;
}
#ifdef COLLECT_TIME_DEBUG
printf("cc: mRuntimes[*]->BeginCycleCollection() took %lldms\n",
(PR_Now() - now) / PR_USEC_PER_MSEC);
#endif
if (mParams.mDoNothing) {
aTryCollections = 0;
} else {
#ifdef COLLECT_TIME_DEBUG
now = PR_Now();
#endif
#ifdef DEBUG_CC
PRUint32 purpleStart = mBuf.GetSize();
#endif
SelectPurple();
#ifdef DEBUG_CC
PRUint32 purpleEnd = mBuf.GetSize();
#endif
#ifdef COLLECT_TIME_DEBUG
printf("cc: SelectPurple() took %lldms\n",
(PR_Now() - now) / PR_USEC_PER_MSEC);
#endif
if (mBuf.GetSize() == 0) {
aTryCollections = 0;
} else {
mScanInProgress = PR_TRUE;
GCGraph graph;
// The main Bacon & Rajan collection algorithm.
#ifdef COLLECT_TIME_DEBUG
now = PR_Now();
#endif
MarkRoots(graph);
#ifdef COLLECT_TIME_DEBUG
{
PRTime then = PR_Now();
printf("cc: MarkRoots() took %lldms\n",
(then - now) / PR_USEC_PER_MSEC);
now = then;
}
#endif
ScanRoots(graph);
#ifdef COLLECT_TIME_DEBUG
printf("cc: ScanRoots() took %lldms\n",
(PR_Now() - now) / PR_USEC_PER_MSEC);
#endif
#ifdef DEBUG_CC
MaybeDrawGraphs(graph);
#endif
mScanInProgress = PR_FALSE;
#ifdef DEBUG_CC
if (aTryCollections != origTryCollections && purpleStart != purpleEnd) {
PRUint32 i = 0;
NodePool::Enumerator queue(graph.mNodes);
while (i++ < purpleStart) {
queue.GetNext();
}
while (i++ < purpleEnd) {
PtrInfo *pi = queue.GetNext();
if (pi->mColor == white) {
printf("nsCycleCollector: a later shutdown collection collected the additional\n"
" suspect %p %s\n"
" (which could be fixed by improving traversal)\n",
pi->mPointer, pi->mName);
}
}
}
#endif
#ifdef COLLECT_TIME_DEBUG
now = PR_Now();
#endif
PRBool collected = CollectWhite(graph);
#ifdef COLLECT_TIME_DEBUG
printf("cc: CollectWhite() took %lldms\n",
(PR_Now() - now) / PR_USEC_PER_MSEC);
#endif
// Some additional book-keeping.
--aTryCollections;
// Since runtimes may add wrappers to the purple buffer
// (which will mean we won't stop repeating due to the
// mBuf.GetSize() == 0 check above), we should stop
// repeating collections if we didn't collect anything
// this time.
if (!collected) {
aTryCollections = 0;
} else {
didCollect = PR_TRUE;
}
}
#ifdef DEBUG_CC
mStats.mCollection++;
if (mParams.mReportStats)
mStats.Dump();
#endif
}
for (PRUint32 i = 0; i <= nsIProgrammingLanguage::MAX; ++i) {
if (mRuntimes[i])
mRuntimes[i]->FinishCycleCollection();
}
if(collections == 0)
break;
totalCollections += collections;
}
mCollectionInProgress = PR_FALSE;
@ -2198,6 +2112,129 @@ nsCycleCollector::Collect(PRUint32 aTryCollections)
#ifdef DEBUG_CC
ExplainLiveExpectedGarbage();
#endif
return totalCollections > 0;
}
PRBool
nsCycleCollector::DoCollect()
{
if (mParams.mDoNothing)
return PR_FALSE;
// It is also essential to empty mBuf here because starting up
// collection in language runtimes may force some "current" suspects
// into mBuf.
mBuf.Empty();
GCGraph graph;
GCGraphBuilder builder(graph, mRuntimes);
#ifdef COLLECT_TIME_DEBUG
PRTime now = PR_Now();
#endif
for (PRUint32 i = 0; i <= nsIProgrammingLanguage::MAX; ++i) {
if (mRuntimes[i])
mRuntimes[i]->BeginCycleCollection(builder);
}
#ifdef COLLECT_TIME_DEBUG
printf("cc: mRuntimes[*]->BeginCycleCollection() took %lldms\n",
(PR_Now() - now) / PR_USEC_PER_MSEC);
now = PR_Now();
#endif
#ifdef DEBUG_CC
PRUint32 purpleStart = mBuf.GetSize();
#endif
SelectPurple();
#ifdef DEBUG_CC
PRUint32 purpleEnd = mBuf.GetSize();
#endif
#ifdef COLLECT_TIME_DEBUG
printf("cc: SelectPurple() took %lldms\n",
(PR_Now() - now) / PR_USEC_PER_MSEC);
#endif
PRBool didCollect = PR_FALSE;
if (builder.Count() > 0 || mBuf.GetSize() != 0) {
mScanInProgress = PR_TRUE;
// The main Bacon & Rajan collection algorithm.
#ifdef COLLECT_TIME_DEBUG
now = PR_Now();
#endif
MarkRoots(graph, builder);
#ifdef COLLECT_TIME_DEBUG
{
PRTime then = PR_Now();
printf("cc: MarkRoots() took %lldms\n",
(then - now) / PR_USEC_PER_MSEC);
now = then;
}
#endif
ScanRoots(graph);
#ifdef COLLECT_TIME_DEBUG
printf("cc: ScanRoots() took %lldms\n",
(PR_Now() - now) / PR_USEC_PER_MSEC);
#endif
#ifdef DEBUG_CC
MaybeDrawGraphs(graph);
#endif
mScanInProgress = PR_FALSE;
#ifdef DEBUG_CC
if (mFollowupCollection && purpleStart != purpleEnd) {
PRUint32 i = 0;
NodePool::Enumerator queue(graph.mNodes);
while (i++ < purpleStart) {
queue.GetNext();
}
while (i++ < purpleEnd) {
PtrInfo *pi = queue.GetNext();
if (pi->mColor == white) {
printf("nsCycleCollector: a later shutdown collection collected the additional\n"
" suspect %p %s\n"
" (which could be fixed by improving traversal)\n",
pi->mPointer, pi->mName);
}
}
}
#endif
#ifdef COLLECT_TIME_DEBUG
now = PR_Now();
#endif
didCollect = CollectWhite(graph);
#ifdef COLLECT_TIME_DEBUG
printf("cc: CollectWhite() took %lldms\n",
(PR_Now() - now) / PR_USEC_PER_MSEC);
#endif
}
#ifdef DEBUG_CC
mStats.mCollection++;
if (mParams.mReportStats)
mStats.Dump();
#endif
for (PRUint32 i = 0; i <= nsIProgrammingLanguage::MAX; ++i) {
if (mRuntimes[i])
mRuntimes[i]->FinishCycleCollection();
}
mFollowupCollection = PR_TRUE;
return didCollect;
}
@ -2260,13 +2297,6 @@ nsCycleCollector::ExplainLiveExpectedGarbage()
mBuf.Empty();
for (PRUint32 i = 0; i <= nsIProgrammingLanguage::MAX; ++i) {
if (mRuntimes[i]) {
mRuntimes[i]->BeginCycleCollection();
mRuntimes[i]->SuspectExtraPointers();
}
}
mCollectionInProgress = PR_TRUE;
mScanInProgress = PR_TRUE;
@ -2278,7 +2308,14 @@ nsCycleCollector::ExplainLiveExpectedGarbage()
PRUint32 suspectCurrentCount = mBuf.GetSize();
mExpectedGarbage.EnumerateEntries(&AddExpectedGarbage, this);
MarkRoots(graph);
GCGraphBuilder builder(graph, mRuntimes);
for (PRUint32 i = 0; i <= nsIProgrammingLanguage::MAX; ++i) {
if (mRuntimes[i])
mRuntimes[i]->BeginCycleCollection(builder);
}
MarkRoots(graph, builder);
ScanRoots(graph);
mScanInProgress = PR_FALSE;
@ -2343,13 +2380,22 @@ nsCycleCollector::ExplainLiveExpectedGarbage()
}
}
if (pi->mInternalRefs != pi->mRefCount) {
printf("nsCycleCollector: %s %p was not collected due "
"to %d\n"
" external references (%d total - %d known)\n",
pi->mName, pi->mPointer,
pi->mRefCount - pi->mInternalRefs,
pi->mRefCount, pi->mInternalRefs);
if (pi->mRefCount == PR_UINT32_MAX ||
(pi->mInternalRefs != pi->mRefCount && pi->mRefCount > 0)) {
if (pi->mRefCount == PR_UINT32_MAX) {
printf("nsCycleCollector: %s %p was not collected due "
"to \n"
" external references\n",
pi->mName, pi->mPointer);
}
else {
printf("nsCycleCollector: %s %p was not collected due "
"to %d\n"
" external references (%d total - %d known)\n",
pi->mName, pi->mPointer,
pi->mRefCount - pi->mInternalRefs,
pi->mRefCount, pi->mInternalRefs);
}
printf(" An object expected to be garbage could be "
"reached from it by the path:\n");
@ -2358,8 +2404,13 @@ nsCycleCollector::ExplainLiveExpectedGarbage()
path = path->mShortestPathToExpectedGarbage)
printf(" %s %p\n", path->mName, path->mPointer);
printf(" The %d known references to it were from:\n",
pi->mInternalRefs);
if (pi->mRefCount == PR_UINT32_MAX) {
printf(" The known references to it were from:\n");
}
else {
printf(" The %d known references to it were from:\n",
pi->mInternalRefs);
}
for (ReversedEdge *e = pi->mReversedEdges;
e; e = e->mNext) {
printf(" %s %p\n",
@ -2579,17 +2630,6 @@ NS_CycleCollectorSuspect(nsISupports *n)
}
void
nsCycleCollector_suspectCurrent(nsISupports *n)
{
if (sCollector) {
PRBool res = sCollector->Suspect(n, PR_TRUE);
NS_ASSERTION(res || sCollector->mParams.mDoNothing,
"suspectCurrent should not fail");
}
}
PRBool
NS_CycleCollectorForget(nsISupports *n)
{
@ -2605,6 +2645,12 @@ nsCycleCollector_collect()
return sCollector ? sCollector->Collect() : PR_FALSE;
}
PRBool
nsCycleCollector_doCollect()
{
return sCollector ? sCollector->DoCollect() : PR_FALSE;
}
nsresult
nsCycleCollector_startup()
{
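Taken together, the DescribeNode and scanWalker changes above give the cycle collector a single coloring rule for refcounted and GC'd nodes alike: a GC-marked JS thing gets an effective refcount of PR_UINT32_MAX (it can never turn white), while a GC-unmarked one gets 0 (white unless something black reaches it). A condensed restatement of that rule (simplified from the patch; DEBUG_CC bookkeeping omitted):

// From GCGraphBuilder::DescribeNode: fold CCNodeType into mRefCount.
if (type == RefCounted) {
    if (refCount == 0 || refCount == PR_UINT32_MAX)
        Fault("zero or overflowing refcount", mCurrPi);
    mCurrPi->mRefCount = refCount;                      // real XPCOM refcount
} else {
    mCurrPi->mRefCount = type == GCMarked ? PR_UINT32_MAX  // never white
                                          : 0;             // unmarked GC thing
}

// From scanWalker::VisitNode: a node becomes white (garbage) when all of its
// references were found inside the graph, or when it is an unmarked GC thing;
// otherwise it is externally held and ScanBlackWalker marks everything it
// reaches black.
if (pi->mInternalRefs > pi->mRefCount && pi->mRefCount > 0)
    Fault("traversed refs exceed refcount", pi);
if (pi->mInternalRefs == pi->mRefCount || pi->mRefCount == 0)
    pi->mColor = white;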

View file

@ -44,32 +44,42 @@
class nsISupports;
class nsCycleCollectionParticipant;
class nsCycleCollectionTraversalCallback;
// An nsCycleCollectionLanguageRuntime is a per-language object that
// implements language-specific aspects of the cycle collection task.
struct nsCycleCollectionLanguageRuntime
{
virtual nsresult BeginCycleCollection() = 0;
virtual nsresult BeginCycleCollection(nsCycleCollectionTraversalCallback &cb) = 0;
virtual nsresult FinishCycleCollection() = 0;
virtual nsCycleCollectionParticipant *ToParticipant(void *p) = 0;
#ifdef DEBUG_CC
virtual void PrintAllReferencesTo(void *p) = 0;
// Call suspectCurrent on any extra pointers that will help build a
// larger object graph for debugging.
virtual void SuspectExtraPointers() = 0;
#endif
};
// PRBool nsCycleCollector_suspect(nsISupports *n);
NS_COM void nsCycleCollector_suspectCurrent(nsISupports *n);
// NS_COM PRBool nsCycleCollector_forget(nsISupports *n);
nsresult nsCycleCollector_startup();
// Returns PR_TRUE if some nodes were collected.
NS_COM PRBool nsCycleCollector_collect();
void nsCycleCollector_shutdown();
// The JS runtime is special, it needs to call cycle collection during its GC.
// If the JS runtime is registered nsCycleCollector_collect will call
// nsCycleCollectionJSRuntime::Collect which will call
// nsCycleCollector_doCollect, else nsCycleCollector_collect will call
// nsCycleCollector_doCollect directly.
struct nsCycleCollectionJSRuntime : public nsCycleCollectionLanguageRuntime
{
/**
* Runs cycle collection and returns the number of collections that have
* collected nodes.
*/
virtual PRUint32 Collect() = 0;
};
// Returns PR_TRUE if some nodes were collected.
NS_COM PRBool nsCycleCollector_doCollect();
#ifdef DEBUG
NS_COM void nsCycleCollector_DEBUG_shouldBeFreed(nsISupports *n);
NS_COM void nsCycleCollector_DEBUG_wasFreed(nsISupports *n);

View file

@ -89,18 +89,26 @@ NS_DEFINE_STATIC_IID_ACCESSOR(nsCycleCollectionISupports,
class nsCycleCollectionParticipant;
enum CCNodeType { RefCounted, GCMarked, GCUnmarked };
class NS_NO_VTABLE nsCycleCollectionTraversalCallback
{
public:
// You must call DescribeNode() with an accurate refcount,
// otherwise cycle collection will fail, and probably crash.
// If type is RefCounted you must call DescribeNode() with an accurate
// refcount, otherwise cycle collection will fail, and probably crash.
// If type is not refcounted then the refcount will be ignored.
#ifdef DEBUG_CC
NS_IMETHOD_(void) DescribeNode(nsrefcnt refcount,
NS_IMETHOD_(void) DescribeNode(CCNodeType type,
nsrefcnt refcount,
size_t objsz,
const char *objname) = 0;
#else
NS_IMETHOD_(void) DescribeNode(nsrefcnt refcount) = 0;
NS_IMETHOD_(void) DescribeNode(CCNodeType type,
nsrefcnt refcount) = 0;
#endif
NS_IMETHOD_(void) NoteXPCOMRoot(nsISupports *root) = 0;
NS_IMETHOD_(void) NoteRoot(PRUint32 langID, void *root,
nsCycleCollectionParticipant* helper) = 0;
NS_IMETHOD_(void) NoteScriptChild(PRUint32 langID, void *child) = 0;
NS_IMETHOD_(void) NoteXPCOMChild(nsISupports *child) = 0;
NS_IMETHOD_(void) NoteNativeChild(void *child,
@ -285,10 +293,10 @@ public:
#ifdef DEBUG_CC
#define NS_IMPL_CYCLE_COLLECTION_DESCRIBE(_class) \
cb.DescribeNode(tmp->mRefCnt.get(), sizeof(_class), #_class);
cb.DescribeNode(RefCounted, tmp->mRefCnt.get(), sizeof(_class), #_class);
#else
#define NS_IMPL_CYCLE_COLLECTION_DESCRIBE(_class) \
cb.DescribeNode(tmp->mRefCnt.get());
cb.DescribeNode(RefCounted, tmp->mRefCnt.get());
#endif
#define NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(_class) \