Merge mozilla-central and mozilla-inbound

This commit is contained in:
Brian Hackett 2011-12-03 14:36:54 -08:00
Родитель 9061d67699 32088faf78
Коммит 335ec03892
189 изменённых файлов: 7691 добавлений и 7347 удалений

Просмотреть файл

@ -188,3 +188,7 @@ https://www.bank2.com:443 privileged,cert=escapeattack2
#
https://redirproxy.example.com:443 privileged,redir=test1.example.com
# Host used for IndexedDB Quota testing
http://bug704464-1.example.com:80 privileged
http://bug704464-2.example.com:80 privileged
http://bug704464-3.example.com:80 privileged

Просмотреть файл

@ -1738,7 +1738,7 @@ nsScriptSecurityManager::CheckFunctionAccess(JSContext *aCx, void *aFunObj,
#ifdef DEBUG
{
JS_ASSERT(JS_ObjectIsFunction(aCx, (JSObject *)aFunObj));
JSFunction *fun = (JSFunction *)JS_GetPrivate(aCx, (JSObject *)aFunObj);
JSFunction *fun = JS_GetObjectFunction((JSObject *)aFunObj);
JSScript *script = JS_GetFunctionScript(aCx, fun);
NS_ASSERTION(!script, "Null principal for non-native function!");
@ -2219,7 +2219,7 @@ nsScriptSecurityManager::GetFunctionObjectPrincipal(JSContext *cx,
return result;
}
JSFunction *fun = (JSFunction *)JS_GetPrivate(cx, obj);
JSFunction *fun = JS_GetObjectFunction(obj);
JSScript *script = JS_GetFunctionScript(cx, fun);
if (!script)
@ -2243,7 +2243,7 @@ nsScriptSecurityManager::GetFunctionObjectPrincipal(JSContext *cx,
script = frameScript;
}
else if (JS_GetFunctionObject(fun) != obj)
else if (!js::IsOriginalScriptFunction(fun))
{
// Here, obj is a cloned function object. In this case, the
// clone's prototype may have been precompiled from brutally
@ -2285,7 +2285,7 @@ nsScriptSecurityManager::GetFramePrincipal(JSContext *cx,
#ifdef DEBUG
if (NS_SUCCEEDED(*rv) && !result)
{
JSFunction *fun = (JSFunction *)JS_GetPrivate(cx, obj);
JSFunction *fun = JS_GetObjectFunction(obj);
JSScript *script = JS_GetFunctionScript(cx, fun);
NS_ASSERTION(!script, "Null principal for non-native function!");
@ -2432,7 +2432,7 @@ nsScriptSecurityManager::doGetObjectPrincipal(JSObject *aObj
jsClass = js::GetObjectClass(aObj);
if (jsClass == &js::CallClass) {
aObj = js::GetObjectParent(aObj);
aObj = js::GetObjectParentMaybeScope(aObj);
if (!aObj)
return nsnull;
@ -2484,7 +2484,7 @@ nsScriptSecurityManager::doGetObjectPrincipal(JSObject *aObj
}
}
aObj = js::GetObjectParent(aObj);
aObj = js::GetObjectParentMaybeScope(aObj);
if (!aObj)
break;

Просмотреть файл

@ -279,7 +279,7 @@ AsyncConnectionHelper::Run()
if (NS_SUCCEEDED(rv)) {
bool hasSavepoint = false;
if (mDatabase) {
IndexedDatabaseManager::SetCurrentDatabase(mDatabase);
IndexedDatabaseManager::SetCurrentWindow(mDatabase->Owner());
// Make the first savepoint.
if (mTransaction) {
@ -292,7 +292,7 @@ AsyncConnectionHelper::Run()
mResultCode = DoDatabaseWork(connection);
if (mDatabase) {
IndexedDatabaseManager::SetCurrentDatabase(nsnull);
IndexedDatabaseManager::SetCurrentWindow(nsnull);
// Release or roll back the savepoint depending on the error code.
if (hasSavepoint) {

Просмотреть файл

@ -98,11 +98,9 @@ GetQuotaPermissions(const nsACString& aASCIIOrigin,
} // anonymous namespace
CheckQuotaHelper::CheckQuotaHelper(IDBDatabase* aDatabase,
CheckQuotaHelper::CheckQuotaHelper(nsPIDOMWindow* aWindow,
mozilla::Mutex& aMutex)
: mWindow(aDatabase->Owner()),
mWindowSerial(mWindow->GetSerial()),
mOrigin(aDatabase->Origin()),
: mWindow(aWindow),
mMutex(aMutex),
mCondVar(mMutex, "CheckQuotaHelper::mCondVar"),
mPromptResult(0),
@ -175,51 +173,59 @@ CheckQuotaHelper::Run()
{
NS_ASSERTION(NS_IsMainThread(), "Wrong thread!");
if (!mHasPrompted) {
mPromptResult = GetQuotaPermissions(mOrigin, mWindow);
nsresult rv = NS_OK;
if (mASCIIOrigin.IsEmpty()) {
rv = IndexedDatabaseManager::GetASCIIOriginFromWindow(mWindow,
mASCIIOrigin);
}
nsresult rv;
if (mHasPrompted) {
// Add permissions to the database, but only if we are in the parent
// process (if we are in the child process, we have already
// set the permission when the prompt was shown in the parent, as
// we cannot set the permission from the child).
if (mPromptResult != nsIPermissionManager::UNKNOWN_ACTION &&
XRE_GetProcessType() == GeckoProcessType_Default) {
nsCOMPtr<nsIURI> uri;
rv = NS_NewURI(getter_AddRefs(uri), mOrigin);
NS_ENSURE_SUCCESS(rv, rv);
nsCOMPtr<nsIPermissionManager> permissionManager =
do_GetService(NS_PERMISSIONMANAGER_CONTRACTID);
NS_ENSURE_STATE(permissionManager);
rv = permissionManager->Add(uri, PERMISSION_INDEXEDDB_UNLIMITED,
mPromptResult,
nsIPermissionManager::EXPIRE_NEVER, 0);
NS_ENSURE_SUCCESS(rv, rv);
if (NS_SUCCEEDED(rv)) {
if (!mHasPrompted) {
mPromptResult = GetQuotaPermissions(mASCIIOrigin, mWindow);
}
}
else if (mPromptResult == nsIPermissionManager::UNKNOWN_ACTION) {
PRUint32 quota = IndexedDatabaseManager::GetIndexedDBQuotaMB();
nsString quotaString;
quotaString.AppendInt(quota);
if (mHasPrompted) {
// Add permissions to the database, but only if we are in the parent
// process (if we are in the child process, we have already
// set the permission when the prompt was shown in the parent, as
// we cannot set the permission from the child).
if (mPromptResult != nsIPermissionManager::UNKNOWN_ACTION &&
XRE_GetProcessType() == GeckoProcessType_Default) {
nsCOMPtr<nsIURI> uri;
rv = NS_NewURI(getter_AddRefs(uri), mASCIIOrigin);
NS_ENSURE_SUCCESS(rv, rv);
nsCOMPtr<nsIPermissionManager> permissionManager =
do_GetService(NS_PERMISSIONMANAGER_CONTRACTID);
NS_ENSURE_STATE(permissionManager);
rv = permissionManager->Add(uri, PERMISSION_INDEXEDDB_UNLIMITED,
mPromptResult,
nsIPermissionManager::EXPIRE_NEVER, 0);
NS_ENSURE_SUCCESS(rv, rv);
}
}
else if (mPromptResult == nsIPermissionManager::UNKNOWN_ACTION) {
PRUint32 quota = IndexedDatabaseManager::GetIndexedDBQuotaMB();
nsCOMPtr<nsIObserverService> obs = GetObserverService();
NS_ENSURE_STATE(obs);
nsString quotaString;
quotaString.AppendInt(quota);
// We have to watch to make sure that the window doesn't go away without
// responding to us. Otherwise our database threads will hang.
rv = obs->AddObserver(this, DOM_WINDOW_DESTROYED_TOPIC, false);
NS_ENSURE_SUCCESS(rv, rv);
nsCOMPtr<nsIObserverService> obs = GetObserverService();
NS_ENSURE_STATE(obs);
rv = obs->NotifyObservers(static_cast<nsIRunnable*>(this),
TOPIC_QUOTA_PROMPT, quotaString.get());
NS_ENSURE_SUCCESS(rv, rv);
// We have to watch to make sure that the window doesn't go away without
// responding to us. Otherwise our database threads will hang.
rv = obs->AddObserver(this, DOM_WINDOW_DESTROYED_TOPIC, false);
NS_ENSURE_SUCCESS(rv, rv);
return NS_OK;
rv = obs->NotifyObservers(static_cast<nsIRunnable*>(this),
TOPIC_QUOTA_PROMPT, quotaString.get());
NS_ENSURE_SUCCESS(rv, rv);
return NS_OK;
}
}
MutexAutoLock lock(mMutex);

Просмотреть файл

@ -65,23 +65,17 @@ public:
NS_DECL_NSIINTERFACEREQUESTOR
NS_DECL_NSIOBSERVER
CheckQuotaHelper(IDBDatabase* aDatabase,
CheckQuotaHelper(nsPIDOMWindow* aWindow,
mozilla::Mutex& aMutex);
bool PromptAndReturnQuotaIsDisabled();
void Cancel();
PRUint32 WindowSerial()
{
return mWindowSerial;
}
private:
nsPIDOMWindow* mWindow;
PRUint32 mWindowSerial;
nsCString mOrigin;
nsCString mASCIIOrigin;
mozilla::Mutex& mMutex;
mozilla::CondVar mCondVar;
PRUint32 mPromptResult;

Просмотреть файл

@ -64,12 +64,6 @@ USING_INDEXEDDB_NAMESPACE
namespace {
PRUint32 gDatabaseInstanceCount = 0;
mozilla::Mutex* gPromptHelpersMutex = nsnull;
// Protected by gPromptHelpersMutex.
nsTArray<nsRefPtr<CheckQuotaHelper> >* gPromptHelpers = nsnull;
class CreateObjectStoreHelper : public AsyncConnectionHelper
{
public:
@ -195,11 +189,6 @@ IDBDatabase::IDBDatabase()
mRunningVersionChange(false)
{
NS_ASSERTION(NS_IsMainThread(), "Wrong thread!");
if (!gDatabaseInstanceCount++) {
NS_ASSERTION(!gPromptHelpersMutex, "Should be null!");
gPromptHelpersMutex = new mozilla::Mutex("IDBDatabase gPromptHelpersMutex");
}
}
IDBDatabase::~IDBDatabase()
@ -218,86 +207,20 @@ IDBDatabase::~IDBDatabase()
if (mListenerManager) {
mListenerManager->Disconnect();
}
if (!--gDatabaseInstanceCount) {
NS_ASSERTION(gPromptHelpersMutex, "Should not be null!");
delete gPromptHelpers;
gPromptHelpers = nsnull;
delete gPromptHelpersMutex;
gPromptHelpersMutex = nsnull;
}
}
bool
IDBDatabase::IsQuotaDisabled()
{
NS_ASSERTION(!NS_IsMainThread(), "Wrong thread!");
NS_ASSERTION(gPromptHelpersMutex, "This should never be null!");
MutexAutoLock lock(*gPromptHelpersMutex);
if (!gPromptHelpers) {
gPromptHelpers = new nsAutoTArray<nsRefPtr<CheckQuotaHelper>, 10>();
}
CheckQuotaHelper* foundHelper = nsnull;
PRUint32 count = gPromptHelpers->Length();
for (PRUint32 index = 0; index < count; index++) {
nsRefPtr<CheckQuotaHelper>& helper = gPromptHelpers->ElementAt(index);
if (helper->WindowSerial() == Owner()->GetSerial()) {
foundHelper = helper;
break;
}
}
if (!foundHelper) {
nsRefPtr<CheckQuotaHelper>* newHelper = gPromptHelpers->AppendElement();
if (!newHelper) {
NS_WARNING("Out of memory!");
return false;
}
*newHelper = new CheckQuotaHelper(this, *gPromptHelpersMutex);
foundHelper = *newHelper;
{
// Unlock before calling out to XPCOM.
MutexAutoUnlock unlock(*gPromptHelpersMutex);
nsresult rv = NS_DispatchToMainThread(foundHelper, NS_DISPATCH_NORMAL);
NS_ENSURE_SUCCESS(rv, false);
}
}
return foundHelper->PromptAndReturnQuotaIsDisabled();
}
void
IDBDatabase::Invalidate()
{
NS_ASSERTION(NS_IsMainThread(), "Wrong thread!");
NS_ASSERTION(gPromptHelpersMutex, "This should never be null!");
// Make sure we're closed too.
Close();
// Cancel any quota prompts that are currently being displayed.
{
MutexAutoLock lock(*gPromptHelpersMutex);
if (gPromptHelpers) {
PRUint32 count = gPromptHelpers->Length();
for (PRUint32 index = 0; index < count; index++) {
nsRefPtr<CheckQuotaHelper>& helper = gPromptHelpers->ElementAt(index);
if (helper->WindowSerial() == Owner()->GetSerial()) {
helper->Cancel();
break;
}
}
}
}
// When the IndexedDatabaseManager needs to invalidate databases, all it has
// is an origin, so we call back into the manager to cancel any prompts for
// our owner.
IndexedDatabaseManager::CancelPromptsForWindow(Owner());
mInvalidated = true;
}

Просмотреть файл

@ -122,8 +122,6 @@ public:
return doc.forget();
}
bool IsQuotaDisabled();
nsCString& Origin()
{
return mASCIIOrigin;

Просмотреть файл

@ -403,24 +403,10 @@ IDBFactory::OpenCommon(const nsAString& aName,
nsIScriptContext* context = sgo->GetContext();
NS_ENSURE_TRUE(context, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR);
nsCOMPtr<nsIPrincipal> principal;
nsresult rv = nsContentUtils::GetSecurityManager()->
GetSubjectPrincipal(getter_AddRefs(principal));
NS_ENSURE_SUCCESS(rv, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR);
nsCString origin;
if (nsContentUtils::IsSystemPrincipal(principal)) {
origin.AssignLiteral("chrome");
}
else {
rv = nsContentUtils::GetASCIIOrigin(principal, origin);
NS_ENSURE_SUCCESS(rv, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR);
if (origin.EqualsLiteral("null")) {
NS_WARNING("IndexedDB databases not allowed for this principal!");
return NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR;
}
}
nsresult rv =
IndexedDatabaseManager::GetASCIIOriginFromWindow(window, origin);
NS_ENSURE_SUCCESS(rv, rv);
nsRefPtr<IDBOpenDBRequest> request =
IDBOpenDBRequest::Create(context, window);

Просмотреть файл

@ -887,7 +887,7 @@ CommitHelper::Run()
}
if (mConnection) {
IndexedDatabaseManager::SetCurrentDatabase(database);
IndexedDatabaseManager::SetCurrentWindow(database->Owner());
if (!mAborted) {
NS_NAMED_LITERAL_CSTRING(release, "COMMIT TRANSACTION");
@ -923,7 +923,7 @@ CommitHelper::Run()
mConnection->Close();
mConnection = nsnull;
IndexedDatabaseManager::SetCurrentDatabase(nsnull);
IndexedDatabaseManager::SetCurrentWindow(nsnull);
}
return NS_OK;

Просмотреть файл

@ -41,6 +41,8 @@
#include "nsIFile.h"
#include "nsIObserverService.h"
#include "nsIScriptObjectPrincipal.h"
#include "nsIScriptSecurityManager.h"
#include "nsISHEntry.h"
#include "nsISimpleEnumerator.h"
#include "nsITimer.h"
@ -54,6 +56,7 @@
#include "nsXPCOMPrivate.h"
#include "AsyncConnectionHelper.h"
#include "CheckQuotaHelper.h"
#include "IDBDatabase.h"
#include "IDBEvents.h"
#include "IDBFactory.h"
@ -75,7 +78,7 @@
#define PREF_INDEXEDDB_QUOTA "dom.indexedDB.warningQuota"
// A bad TLS index number.
#define BAD_TLS_INDEX (PRUintn)-1
#define BAD_TLS_INDEX (PRUintn)-1
USING_INDEXEDDB_NAMESPACE
using namespace mozilla::services;
@ -88,8 +91,6 @@ PRInt32 gShutdown = 0;
// Does not hold a reference.
IndexedDatabaseManager* gInstance = nsnull;
PRUintn gCurrentDatabaseIndex = BAD_TLS_INDEX;
PRInt32 gIndexedDBQuotaMB = DEFAULT_QUOTA_MB;
class QuotaCallback : public mozIStorageQuotaCallback
@ -104,13 +105,7 @@ public:
nsISupports* aUserData,
PRInt64* _retval)
{
NS_ASSERTION(gCurrentDatabaseIndex != BAD_TLS_INDEX,
"This should be impossible!");
IDBDatabase* database =
static_cast<IDBDatabase*>(PR_GetThreadPrivate(gCurrentDatabaseIndex));
if (database && database->IsQuotaDisabled()) {
if (IndexedDatabaseManager::QuotaIsLifted()) {
*_retval = 0;
return NS_OK;
}
@ -146,6 +141,8 @@ EnumerateToTArray(const nsACString& aKey,
} // anonymous namespace
IndexedDatabaseManager::IndexedDatabaseManager()
: mCurrentWindowIndex(BAD_TLS_INDEX),
mQuotaHelperMutex("IndexedDatabaseManager.mQuotaHelperMutex")
{
NS_ASSERTION(NS_IsMainThread(), "Wrong thread!");
NS_ASSERTION(!gInstance, "More than one instance!");
@ -172,30 +169,31 @@ IndexedDatabaseManager::GetOrCreate()
nsRefPtr<IndexedDatabaseManager> instance(gInstance);
if (!instance) {
// We need a thread-local to hold our current database.
if (gCurrentDatabaseIndex == BAD_TLS_INDEX) {
if (PR_NewThreadPrivateIndex(&gCurrentDatabaseIndex, nsnull) !=
PR_SUCCESS) {
NS_ERROR("PR_NewThreadPrivateIndex failed!");
gCurrentDatabaseIndex = BAD_TLS_INDEX;
return nsnull;
}
if (NS_FAILED(Preferences::AddIntVarCache(&gIndexedDBQuotaMB,
PREF_INDEXEDDB_QUOTA,
DEFAULT_QUOTA_MB))) {
NS_WARNING("Unable to respond to quota pref changes!");
gIndexedDBQuotaMB = DEFAULT_QUOTA_MB;
}
if (NS_FAILED(Preferences::AddIntVarCache(&gIndexedDBQuotaMB,
PREF_INDEXEDDB_QUOTA,
DEFAULT_QUOTA_MB))) {
NS_WARNING("Unable to respond to quota pref changes!");
gIndexedDBQuotaMB = DEFAULT_QUOTA_MB;
}
instance = new IndexedDatabaseManager();
if (!instance->mLiveDatabases.Init()) {
if (!instance->mLiveDatabases.Init() ||
!instance->mQuotaHelperHash.Init()) {
NS_WARNING("Out of memory!");
return nsnull;
}
// We need a thread-local to hold the current window.
NS_ASSERTION(instance->mCurrentWindowIndex == BAD_TLS_INDEX, "Huh?");
if (PR_NewThreadPrivateIndex(&instance->mCurrentWindowIndex, nsnull) !=
PR_SUCCESS) {
NS_ERROR("PR_NewThreadPrivateIndex failed, IndexedDB disabled");
instance->mCurrentWindowIndex = BAD_TLS_INDEX;
return nsnull;
}
// Make a timer here to avoid potential failures later. We don't actually
// initialize the timer until shutdown.
instance->mShutdownTimer = do_CreateInstance(NS_TIMER_CONTRACTID);
@ -548,30 +546,23 @@ IndexedDatabaseManager::OnDatabaseClosed(IDBDatabase* aDatabase)
}
}
// static
bool
IndexedDatabaseManager::SetCurrentDatabase(IDBDatabase* aDatabase)
void
IndexedDatabaseManager::SetCurrentWindowInternal(nsPIDOMWindow* aWindow)
{
NS_ASSERTION(gCurrentDatabaseIndex != BAD_TLS_INDEX,
"This should have been set already!");
if (aWindow) {
#ifdef DEBUG
if (aDatabase) {
NS_ASSERTION(!PR_GetThreadPrivate(gCurrentDatabaseIndex),
"Someone forgot to unset gCurrentDatabaseIndex!");
NS_ASSERTION(!PR_GetThreadPrivate(mCurrentWindowIndex),
"Somebody forgot to clear the current window!");
#endif
PR_SetThreadPrivate(mCurrentWindowIndex, aWindow);
}
else {
NS_ASSERTION(PR_GetThreadPrivate(gCurrentDatabaseIndex),
"Someone forgot to set gCurrentDatabaseIndex!");
}
#ifdef DEBUG
NS_ASSERTION(PR_GetThreadPrivate(mCurrentWindowIndex),
"Somebody forgot to clear the current window!");
#endif
if (PR_SetThreadPrivate(gCurrentDatabaseIndex, aDatabase) != PR_SUCCESS) {
NS_WARNING("Failed to set gCurrentDatabaseIndex!");
return false;
PR_SetThreadPrivate(mCurrentWindowIndex, nsnull);
}
return true;
}
// static
@ -662,6 +653,102 @@ IndexedDatabaseManager::EnsureQuotaManagementForDirectory(nsIFile* aDirectory)
return rv;
}
// Returns true if the storage quota has been lifted for the window bound to
// the current thread (stashed in TLS by AutoEnterWindow/SetCurrentWindow).
// Runs on a database thread and blocks in PromptAndReturnQuotaIsDisabled
// until the main thread has resolved the quota prompt for that window.
bool
IndexedDatabaseManager::QuotaIsLiftedInternal()
{
  nsPIDOMWindow* window = nsnull;
  nsRefPtr<CheckQuotaHelper> helper = nsnull;
  bool createdHelper = false;

  // The window for this thread's current operation, set via SetCurrentWindow.
  window =
    static_cast<nsPIDOMWindow*>(PR_GetThreadPrivate(mCurrentWindowIndex));

  // Once IDB is supported outside of Windows this should become an early
  // return true.
  NS_ASSERTION(window, "Why don't we have a Window here?");

  // Hold the lock from here on.
  MutexAutoLock autoLock(mQuotaHelperMutex);

  mQuotaHelperHash.Get(window, getter_AddRefs(helper));

  if (!helper) {
    // No prompt is currently in flight for this window; create a helper and
    // send it to the main thread to show the prompt UI.
    helper = new CheckQuotaHelper(window, mQuotaHelperMutex);
    createdHelper = true;

    bool result = mQuotaHelperHash.Put(window, helper);
    NS_ENSURE_TRUE(result, result);

    // Unlock while calling out to XPCOM
    {
      MutexAutoUnlock autoUnlock(mQuotaHelperMutex);

      nsresult rv = NS_DispatchToMainThread(helper);
      NS_ENSURE_SUCCESS(rv, false);
    }

    // Relocked. If any other threads hit the quota limit on the same Window,
    // they are using the helper we created here and are now blocking in
    // PromptAndReturnQuotaIsDisabled.
  }

  // Waits for the main thread to record the user's answer (the helper shares
  // mQuotaHelperMutex with us — see CheckQuotaHelper's constructor).
  bool result = helper->PromptAndReturnQuotaIsDisabled();

  // If this thread created the helper and added it to the hash, this thread
  // must remove it.
  if (createdHelper) {
    mQuotaHelperHash.Remove(window);
  }

  return result;
}
// Main-thread implementation behind CancelPromptsForWindow: if a quota
// prompt helper is currently registered for aWindow, ask it to cancel so
// database threads blocked on the prompt can unwind (see
// IDBDatabase::Invalidate, which calls this when a window is going away).
void
IndexedDatabaseManager::CancelPromptsForWindowInternal(nsPIDOMWindow* aWindow)
{
  NS_ASSERTION(NS_IsMainThread(), "Wrong thread!");

  nsRefPtr<CheckQuotaHelper> helper;

  // Protects mQuotaHelperHash against the database threads.
  MutexAutoLock autoLock(mQuotaHelperMutex);

  mQuotaHelperHash.Get(aWindow, getter_AddRefs(helper));
  if (helper) {
    helper->Cancel();
  }
}
// static
// Computes the ASCII origin string for aWindow's principal into
// aASCIIOrigin. The system (chrome) principal maps to the literal "chrome";
// a "null" origin is rejected because IndexedDB is not permitted for such
// principals. Must be called on the main thread since it touches the window.
nsresult
IndexedDatabaseManager::GetASCIIOriginFromWindow(nsPIDOMWindow* aWindow,
                                                 nsCString& aASCIIOrigin)
{
  NS_ASSERTION(NS_IsMainThread(),
               "We're about to touch a window off the main thread!");

  nsCOMPtr<nsIScriptObjectPrincipal> sop = do_QueryInterface(aWindow);
  NS_ENSURE_TRUE(sop, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR);

  nsCOMPtr<nsIPrincipal> principal = sop->GetPrincipal();
  NS_ENSURE_TRUE(principal, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR);

  if (nsContentUtils::IsSystemPrincipal(principal)) {
    // Chrome code shares a single well-known origin.
    aASCIIOrigin.AssignLiteral("chrome");
  }
  else {
    nsresult rv = nsContentUtils::GetASCIIOrigin(principal, aASCIIOrigin);
    NS_ENSURE_SUCCESS(rv, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR);

    if (aASCIIOrigin.EqualsLiteral("null")) {
      NS_WARNING("IndexedDB databases not allowed for this principal!");
      return NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR;
    }
  }

  return NS_OK;
}
// static
nsresult
IndexedDatabaseManager::DispatchHelper(AsyncConnectionHelper* aHelper)

Просмотреть файл

@ -44,6 +44,8 @@
#include "mozilla/dom/indexedDB/IDBDatabase.h"
#include "mozilla/dom/indexedDB/IDBRequest.h"
#include "mozilla/Mutex.h"
#include "nsIIndexedDatabaseManager.h"
#include "nsIObserver.h"
#include "nsIRunnable.h"
@ -51,6 +53,7 @@
#include "nsIURI.h"
#include "nsClassHashtable.h"
#include "nsRefPtrHashtable.h"
#include "nsHashKeys.h"
#define INDEXEDDB_MANAGER_CONTRACTID "@mozilla.org/dom/indexeddb/manager;1"
@ -62,6 +65,8 @@ BEGIN_INDEXEDDB_NAMESPACE
class AsyncConnectionHelper;
class CheckQuotaHelper;
class IndexedDatabaseManager : public nsIIndexedDatabaseManager,
public nsIObserver
{
@ -129,14 +134,45 @@ public:
// Used to check if there are running transactions in a given window.
bool HasOpenTransactions(nsPIDOMWindow* aWindow);
static bool
SetCurrentDatabase(IDBDatabase* aDatabase);
// Set the Window that the current thread is doing operations for.
// The caller is responsible for ensuring that aWindow is held alive.
// Pass nsnull to clear the binding when the operation completes (see the
// AutoEnterWindow RAII helper).
static inline void
SetCurrentWindow(nsPIDOMWindow* aWindow)
{
  IndexedDatabaseManager* mgr = Get();
  NS_ASSERTION(mgr, "Must have a manager here!");

  // Note: no 'return' here — SetCurrentWindowInternal returns void; this
  // matches the sibling CancelPromptsForWindow wrapper below.
  mgr->SetCurrentWindowInternal(aWindow);
}
static PRUint32
GetIndexedDBQuotaMB();
nsresult EnsureQuotaManagementForDirectory(nsIFile* aDirectory);
// Determine if the quota is lifted for the Window the current thread is
// using.
// May block the calling (database) thread while the user is prompted; see
// QuotaIsLiftedInternal for the prompt/wait machinery.
static inline bool
QuotaIsLifted()
{
  IndexedDatabaseManager* mgr = Get();
  NS_ASSERTION(mgr, "Must have a manager here!");

  return mgr->QuotaIsLiftedInternal();
}
// Cancel any quota prompt currently pending for aWindow (e.g. when the
// window is being invalidated). Main-thread only; forwards to the
// singleton's CancelPromptsForWindowInternal.
static inline void
CancelPromptsForWindow(nsPIDOMWindow* aWindow)
{
  IndexedDatabaseManager* mgr = Get();
  NS_ASSERTION(mgr, "Must have a manager here!");

  mgr->CancelPromptsForWindowInternal(aWindow);
}
static nsresult
GetASCIIOriginFromWindow(nsPIDOMWindow* aWindow, nsCString& aASCIIOrigin);
private:
IndexedDatabaseManager();
~IndexedDatabaseManager();
@ -147,6 +183,10 @@ private:
WaitingOnDatabasesCallback aCallback,
void* aClosure);
void SetCurrentWindowInternal(nsPIDOMWindow* aWindow);
bool QuotaIsLiftedInternal();
void CancelPromptsForWindowInternal(nsPIDOMWindow* aWindow);
// Called when a database is created.
bool RegisterDatabase(IDBDatabase* aDatabase);
@ -267,6 +307,15 @@ private:
// Maintains a list of live databases per origin.
nsClassHashtable<nsCStringHashKey, nsTArray<IDBDatabase*> > mLiveDatabases;
// TLS storage index for the current thread's window
PRUintn mCurrentWindowIndex;
// Lock protecting mQuotaHelperHash
mozilla::Mutex mQuotaHelperMutex;
// A map of Windows to the corresponding quota helper.
nsRefPtrHashtable<nsPtrHashKey<nsPIDOMWindow>, CheckQuotaHelper> mQuotaHelperHash;
// Maintains a list of origins that we're currently enumerating to gather
// usage statistics.
nsAutoTArray<nsRefPtr<AsyncUsageRunnable>, 1> mUsageRunnables;
@ -290,6 +339,21 @@ private:
nsTArray<nsCString> mTrackedQuotaPaths;
};
// RAII helper: binds aWindow to the current thread for the lifetime of a
// scope via IndexedDatabaseManager::SetCurrentWindow, and clears the
// binding on destruction. The caller must keep aWindow alive for the
// duration (SetCurrentWindow does not hold a reference).
class AutoEnterWindow
{
public:
  AutoEnterWindow(nsPIDOMWindow* aWindow)
  {
    NS_ASSERTION(aWindow, "This should never be null!");
    IndexedDatabaseManager::SetCurrentWindow(aWindow);
  }

  ~AutoEnterWindow()
  {
    IndexedDatabaseManager::SetCurrentWindow(nsnull);
  }
};
END_INDEXEDDB_NAMESPACE
#endif /* mozilla_dom_indexeddb_indexeddatabasemanager_h__ */

Просмотреть файл

@ -1090,6 +1090,14 @@ OpenDatabaseHelper::DoDatabaseWork()
return NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR;
}
NS_ASSERTION(mOpenDBRequest, "This should never be null!");
// Once we support IDB outside of Windows this assertion will no longer hold.
nsPIDOMWindow* window = mOpenDBRequest->Owner();
NS_ASSERTION(window, "This should never be null");
AutoEnterWindow autoWindow(window);
nsCOMPtr<nsIFile> dbFile;
nsresult rv = GetDatabaseFile(mASCIIOrigin, mName, getter_AddRefs(dbFile));
NS_ENSURE_SUCCESS(rv, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR);

Просмотреть файл

@ -129,6 +129,8 @@ BROWSER_TEST_FILES = \
browser_quotaPrompt.html \
browser_quotaPromptAllow.js \
browser_quotaPromptDeny.js \
browser_quotaPromptDatabases.html \
browser_quotaPromptDatabases.js \
head.js \
$(NULL)

Просмотреть файл

@ -4,7 +4,7 @@
*/
// Make sure this is a unique origin or the tests will randomly fail!
const testPageURL = "http://test1.example.org/browser/" +
const testPageURL = "http://bug704464-1.example.com/browser/" +
"dom/indexedDB/test/browser_quotaPrompt.html";
const notificationID = "indexedDB-quota-prompt";

Просмотреть файл

@ -0,0 +1,55 @@
<!--
Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/
-->
<html>
<head>
<title>Indexed Database Test</title>
<script type="text/javascript;version=1.7">
const READ_WRITE = Components.interfaces.nsIIDBTransaction.READ_WRITE;
let db;
let i = 0;
// Handler for the chrome-dispatched "indexedDB-addMore" event: opens one
// more uniquely-named database for this page, then reports "complete" to
// the harness on success or "abort" on error.
// Fix: the original assigned request.onerror/request.onsuccess twice
// (errorHandler / grabEventAndContinueHandler first, then the function
// literals below) — the first assignments were dead code, immediately
// overwritten, so they are removed.
function onAddMore() {
  // Unique name per open so each call creates a brand-new database.
  const name = window.location.pathname + i++;
  let request = mozIndexedDB.open(name, 1);
  request.onsuccess = function(event) {
    setTimeout(testFinishedCallback, 0, "complete");
  }
  request.onerror = function(event) {
    setTimeout(testFinishedCallback, 0, "abort");
  }
}
// Handler for the chrome-dispatched "indexedDB-done" event: unregisters
// both control-event listeners, records the final result for the harness,
// and ends the test.
function onDone() {
  window.removeEventListener("indexedDB-addMore", onAddMore, true);
  window.removeEventListener("indexedDB-done", onDone, true);

  testResult = "finished";
  testException = undefined;
  finishTest();
}
// Generator entry point driven by browserHelpers.js: registers the
// listeners the chrome test uses to control this page, signals "ready" to
// the harness, then yields until the harness resumes or closes the page.
function testSteps()
{
  window.addEventListener("indexedDB-addMore", onAddMore, true);
  window.addEventListener("indexedDB-done", onDone, true);
  setTimeout(testFinishedCallback, 0, "ready");
  yield;
}
</script>
<script type="text/javascript;version=1.7" src="browserHelpers.js"></script>
</head>
<body onload="runTest();" onunload="finishTestNow();"></body>
</html>

Просмотреть файл

@ -0,0 +1,76 @@
/**
* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/
*/
// Make sure this is a unique origin or the tests will randomly fail!
const testPageURL = "http://bug704464-3.example.com/browser/" +
"dom/indexedDB/test/browser_quotaPromptDatabases.html";
const notificationID = "indexedDB-quota-prompt";
// Browser-chrome entry point: primes permissions and prefs so the quota
// prompt will appear for the test origin, then kicks off test1.
function test()
{
  waitForExplicitFinish();
  // Many sequential database opens — allow extra time on slow machines.
  requestLongerTimeout(10);
  setPermission(testPageURL, "indexedDB");
  // Start without the "unlimited" grant so the prompt is guaranteed to show.
  removePermission(testPageURL, "indexedDB-unlimited");
  // Very small warning quota (2 — presumably MB, matching the pref name)
  // so the prompt triggers quickly.
  Services.prefs.setIntPref("dom.indexedDB.warningQuota", 2);
  executeSoon(test1);
}
// Count of "indexedDB-addMore" requests dispatched to the content page.
let addMoreTest1Count = 0;

// Loads the test page, then keeps asking it to open new databases until at
// least five more have completed after the quota prompt first appeared;
// finally verifies that accepting the prompt granted the
// "indexedDB-unlimited" permission.
function test1()
{
  gBrowser.selectedTab = gBrowser.addTab();
  gBrowser.selectedBrowser.addEventListener("load", function () {
    gBrowser.selectedBrowser.removeEventListener("load", arguments.callee, true);

    // Set by the "popupshowing" handler below to the request count in
    // flight when the prompt appeared.
    let seenPopupCount;

    setFinishedCallback(function(result) {
      is(result, "ready", "Got 'ready' result");
      setFinishedCallback(function(result) {
        is(result, "complete", "Got 'complete' result");
        if (addMoreTest1Count >= seenPopupCount + 5) {
          // Enough databases opened past the prompt; tell the page to wrap
          // up and then check the resulting permission.
          setFinishedCallback(function(result) {
            is(result, "finished", "Got 'finished' result");
            is(getPermission(testPageURL, "indexedDB-unlimited"),
               Components.interfaces.nsIPermissionManager.ALLOW_ACTION,
               "Correct permission set");
            gBrowser.removeCurrentTab();
            unregisterAllPopupEventHandlers();
            addMoreTest1Count = seenPopupCount;
            executeSoon(finish);
          });
          executeSoon(function() { dispatchEvent("indexedDB-done"); });
        }
        else {
          // Not there yet — request another database open.
          ++addMoreTest1Count;
          executeSoon(function() { dispatchEvent("indexedDB-addMore"); });
        }
      });
      ++addMoreTest1Count;
      executeSoon(function() { dispatchEvent("indexedDB-addMore"); });
    });

    registerPopupEventHandler("popupshowing", function () {
      ok(true, "prompt showing");
      // The request that triggered the prompt has not completed yet.
      seenPopupCount = addMoreTest1Count - 1;
    });
    registerPopupEventHandler("popupshown", function () {
      ok(true, "prompt shown");
      // Trigger the prompt's main action (expected to grant the
      // permission, as asserted above).
      triggerMainCommand(this);
    });
    registerPopupEventHandler("popuphidden", function () {
      ok(true, "prompt hidden");
    });
  }, true);

  info("loading test page: " + testPageURL);
  content.location = testPageURL;
}

Просмотреть файл

@ -4,7 +4,7 @@
*/
// Make sure this is a unique origin or the tests will randomly fail!
const testPageURL = "http://test2.example.org/browser/" +
const testPageURL = "http://bug704464-2.example.com/browser/" +
"dom/indexedDB/test/browser_quotaPrompt.html";
const notificationID = "indexedDB-quota-prompt";

Просмотреть файл

@ -40,6 +40,7 @@
// FIXME(bug 332648): Give me a real API please!
#include "jscntxt.h"
#include "jsfriendapi.h"
#include "nsIInterfaceRequestorUtils.h"
#include "nsJSNPRuntime.h"
@ -1483,7 +1484,7 @@ CallNPMethodInternal(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
// the function object.
if (npobj->_class->invoke) {
JSFunction *fun = (JSFunction *)::JS_GetPrivate(cx, funobj);
JSFunction *fun = ::JS_GetObjectFunction(funobj);
JSString *name = ::JS_InternJSString(cx, ::JS_GetFunctionId(fun));
NPIdentifier id = StringToNPIdentifier(cx, name);

Просмотреть файл

@ -91,8 +91,8 @@ public:
InitClass(JSContext* aCx, JSObject* aObj, JSObject* aParentProto,
bool aMainRuntime)
{
JSObject* proto = JS_InitClass(aCx, aObj, aParentProto, &sClass, Construct,
0, sProperties, sFunctions, NULL, NULL);
JSObject* proto = js::InitClassWithReserved(aCx, aObj, aParentProto, &sClass, Construct,
0, sProperties, sFunctions, NULL, NULL);
if (!proto) {
return NULL;
}
@ -102,11 +102,10 @@ public:
parent->AssertIsOnWorkerThread();
JSObject* constructor = JS_GetConstructor(aCx, proto);
if (!constructor ||
!JS_SetReservedSlot(aCx, constructor, CONSTRUCTOR_SLOT_PARENT,
PRIVATE_TO_JSVAL(parent))) {
if (!constructor)
return NULL;
}
js::SetFunctionNativeReserved(constructor, CONSTRUCTOR_SLOT_PARENT,
PRIVATE_TO_JSVAL(parent));
}
return proto;
@ -153,11 +152,8 @@ protected:
return false;
}
jsval priv;
if (!JS_GetReservedSlot(aCx, JSVAL_TO_OBJECT(JS_CALLEE(aCx, aVp)),
CONSTRUCTOR_SLOT_PARENT, &priv)) {
return false;
}
jsval priv = js::GetFunctionNativeReserved(JSVAL_TO_OBJECT(JS_CALLEE(aCx, aVp)),
CONSTRUCTOR_SLOT_PARENT);
RuntimeService* runtimeService;
WorkerPrivate* parent;
@ -345,8 +341,8 @@ public:
InitClass(JSContext* aCx, JSObject* aObj, JSObject* aParentProto,
bool aMainRuntime)
{
JSObject* proto = JS_InitClass(aCx, aObj, aParentProto, &sClass, Construct,
0, NULL, NULL, NULL, NULL);
JSObject* proto = js::InitClassWithReserved(aCx, aObj, aParentProto, &sClass, Construct,
0, NULL, NULL, NULL, NULL);
if (!proto) {
return NULL;
}
@ -356,11 +352,10 @@ public:
parent->AssertIsOnWorkerThread();
JSObject* constructor = JS_GetConstructor(aCx, proto);
if (!constructor ||
!JS_SetReservedSlot(aCx, constructor, CONSTRUCTOR_SLOT_PARENT,
PRIVATE_TO_JSVAL(parent))) {
if (!constructor)
return NULL;
}
js::SetFunctionNativeReserved(constructor, CONSTRUCTOR_SLOT_PARENT,
PRIVATE_TO_JSVAL(parent));
}
return proto;

Просмотреть файл

@ -271,11 +271,8 @@ private:
JSObject* wrapper = JSVAL_TO_OBJECT(JS_CALLEE(aCx, aVp));
JS_ASSERT(JS_ObjectIsFunction(aCx, wrapper));
jsval scope, listener;
if (!JS_GetReservedSlot(aCx, wrapper, SLOT_wrappedScope, &scope) ||
!JS_GetReservedSlot(aCx, wrapper, SLOT_wrappedFunction, &listener)) {
return false;
}
jsval scope = js::GetFunctionNativeReserved(wrapper, SLOT_wrappedScope);
jsval listener = js::GetFunctionNativeReserved(wrapper, SLOT_wrappedFunction);
JS_ASSERT(JSVAL_IS_OBJECT(scope));
@ -319,11 +316,8 @@ private:
JS_ASSERT(JSVAL_IS_OBJECT(adaptor));
jsval listener;
if (!JS_GetReservedSlot(aCx, JSVAL_TO_OBJECT(adaptor), SLOT_wrappedFunction,
&listener)) {
return false;
}
jsval listener = js::GetFunctionNativeReserved(JSVAL_TO_OBJECT(adaptor),
SLOT_wrappedFunction);
*aVp = listener;
return true;
@ -339,8 +333,8 @@ private:
return false;
}
JSFunction* adaptor = JS_NewFunction(aCx, UnwrapErrorEvent, 1, 0,
JS_GetGlobalObject(aCx), "unwrap");
JSFunction* adaptor = js::NewFunctionWithReserved(aCx, UnwrapErrorEvent, 1, 0,
JS_GetGlobalObject(aCx), "unwrap");
if (!adaptor) {
return false;
}
@ -350,11 +344,9 @@ private:
return false;
}
if (!JS_SetReservedSlot(aCx, listener, SLOT_wrappedScope,
OBJECT_TO_JSVAL(aObj)) ||
!JS_SetReservedSlot(aCx, listener, SLOT_wrappedFunction, *aVp)) {
return false;
}
js::SetFunctionNativeReserved(listener, SLOT_wrappedScope,
OBJECT_TO_JSVAL(aObj));
js::SetFunctionNativeReserved(listener, SLOT_wrappedFunction, *aVp);
jsval val = OBJECT_TO_JSVAL(listener);
return scope->SetEventListenerOnEventTarget(aCx, name + 2, &val);

Просмотреть файл

@ -747,7 +747,7 @@ jsd_GetValueParent(JSDContext* jsdc, JSDValue* jsdval)
return NULL;
}
parent = JS_GetParent(jsdc->dumbContext,obj);
parent = JS_GetParentOrScopeChain(jsdc->dumbContext,obj);
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(jsdc->dumbContext);
if(!parent)

Просмотреть файл

@ -357,6 +357,7 @@ class HashTable : private AllocPolicy
public:
HashTable(AllocPolicy ap)
: AllocPolicy(ap),
hashShift(sHashBits),
entryCount(0),
gen(0),
removedCount(0),

Просмотреть файл

@ -564,6 +564,11 @@ public:
return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_label);
}
// Byte offset between a DataLabelPtr and a Label, mirroring the other
// differenceBetween overloads in this class.
ptrdiff_t differenceBetween(DataLabelPtr from, Label to)
{
  return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_label);
}
ptrdiff_t differenceBetween(DataLabelPtr from, Jump to)
{
return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_jmp);

Просмотреть файл

@ -334,7 +334,7 @@ public:
return branch32(cond, left, Imm32(right));
}
Jump branchPtr(Condition cond, AbsoluteAddress left, ImmPtr right)
Jump branchPtr(Condition cond, AbsoluteAddress left, ImmPtr right, RegisterID scratch)
{
return branch32(cond, left, Imm32(right));
}

Просмотреть файл

@ -431,6 +431,12 @@ public:
return branchPtr(cond, Address(scratchRegister), right);
}
// Compare the pointer stored at an absolute address against an immediate.
// The absolute address is first materialized into the caller-provided
// scratch register so a register-indirect compare can be used (the nearby
// overload uses the fixed scratchRegister instead).
Jump branchPtr(Condition cond, AbsoluteAddress left, ImmPtr right, RegisterID scratch)
{
  move(ImmPtr(left.m_ptr), scratch);
  return branchPtr(cond, Address(scratch), right);
}
Jump branchPtr(Condition cond, Address left, RegisterID right)
{
m_assembler.cmpq_rm(right, left.offset, left.base);

Просмотреть файл

@ -110,9 +110,9 @@ public:
MacroAssembler::repatchInt32(dataLabel32, value);
}
void repatch(CodeLocationDataLabelPtr dataLabelPtr, void* value)
void repatch(CodeLocationDataLabelPtr dataLabelPtr, const void* value)
{
MacroAssembler::repatchPointer(dataLabelPtr, value);
MacroAssembler::repatchPointer(dataLabelPtr, (void*) value);
}
void repatchLoadPtrToLEA(CodeLocationInstruction instruction)

Просмотреть файл

@ -641,7 +641,7 @@ InitTypeConstructor(JSContext* cx,
JSObject*& typeProto,
JSObject*& dataProto)
{
JSFunction* fun = JS_DefineFunction(cx, parent, spec.name, spec.call,
JSFunction* fun = js::DefineFunctionWithReserved(cx, parent, spec.name, spec.call,
spec.nargs, spec.flags);
if (!fun)
return false;
@ -672,8 +672,7 @@ InitTypeConstructor(JSContext* cx,
// Stash ctypes.{Pointer,Array,Struct}Type.prototype on a reserved slot of
// the type constructor, for faster lookup.
if (!JS_SetReservedSlot(cx, obj, SLOT_FN_CTORPROTO, OBJECT_TO_JSVAL(typeProto)))
return false;
js::SetFunctionNativeReserved(obj, SLOT_FN_CTORPROTO, OBJECT_TO_JSVAL(typeProto));
// Create an object to serve as the common ancestor for all CData objects
// created from the given type constructor. This has ctypes.CData.prototype
@ -725,14 +724,18 @@ InitInt64Class(JSContext* cx,
if (!JS_FreezeObject(cx, ctor))
return NULL;
// Stash ctypes.{Int64,UInt64}.prototype on a reserved slot of the 'join'
// function.
jsval join;
ASSERT_OK(JS_GetProperty(cx, ctor, "join", &join));
if (!JS_SetReservedSlot(cx, JSVAL_TO_OBJECT(join), SLOT_FN_INT64PROTO,
OBJECT_TO_JSVAL(prototype)))
// Redefine the 'join' function as an extended native and stash
// ctypes.{Int64,UInt64}.prototype in a reserved slot of the new function.
JS_ASSERT(clasp == &sInt64ProtoClass || clasp == &sUInt64ProtoClass);
JSNative native = (clasp == &sInt64ProtoClass) ? Int64::Join : UInt64::Join;
JSFunction* fun = js::DefineFunctionWithReserved(cx, ctor, "join", native,
2, CTYPESFN_FLAGS);
if (!fun)
return NULL;
js::SetFunctionNativeReserved(fun, SLOT_FN_INT64PROTO,
OBJECT_TO_JSVAL(prototype));
if (!JS_FreezeObject(cx, prototype))
return NULL;
@ -3045,8 +3048,7 @@ CType::GetProtoFromCtor(JSContext* cx, JSObject* obj, CTypeProtoSlot slot)
{
// Get ctypes.{Pointer,Array,Struct}Type.prototype from a reserved slot
// on the type constructor.
jsval protoslot;
ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_FN_CTORPROTO, &protoslot));
jsval protoslot = js::GetFunctionNativeReserved(obj, SLOT_FN_CTORPROTO);
JSObject* proto = JSVAL_TO_OBJECT(protoslot);
JS_ASSERT(proto);
JS_ASSERT(CType::IsCTypeProto(cx, proto));
@ -6291,8 +6293,7 @@ Int64::Join(JSContext* cx, uintN argc, jsval* vp)
// Get Int64.prototype from the function's reserved slot.
JSObject* callee = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp));
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, callee, SLOT_FN_INT64PROTO, &slot));
jsval slot = js::GetFunctionNativeReserved(callee, SLOT_FN_INT64PROTO);
JSObject* proto = JSVAL_TO_OBJECT(slot);
JS_ASSERT(JS_GET_CLASS(cx, proto) == &sInt64ProtoClass);
@ -6459,8 +6460,7 @@ UInt64::Join(JSContext* cx, uintN argc, jsval* vp)
// Get UInt64.prototype from the function's reserved slot.
JSObject* callee = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp));
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, callee, SLOT_FN_INT64PROTO, &slot));
jsval slot = js::GetFunctionNativeReserved(callee, SLOT_FN_INT64PROTO);
JSObject* proto = JSVAL_TO_OBJECT(slot);
JS_ASSERT(JS_GET_CLASS(cx, proto) == &sUInt64ProtoClass);

Просмотреть файл

@ -93,7 +93,7 @@ DefineGlobals(JSContext *cx, GlobalScope &globalScope, JSScript *script)
JSPROP_ENUMERATE | JSPROP_PERMANENT, 0, 0, DNP_SKIP_TYPE);
if (!shape)
return false;
def.knownSlot = shape->slot;
def.knownSlot = shape->slot();
}
Vector<JSScript *, 16> worklist(cx);
@ -123,10 +123,10 @@ DefineGlobals(JSContext *cx, GlobalScope &globalScope, JSScript *script)
JSObject *obj = arr->vector[i];
if (!obj->isFunction())
continue;
JSFunction *fun = obj->getFunctionPrivate();
JSFunction *fun = obj->toFunction();
JS_ASSERT(fun->isInterpreted());
JSScript *inner = fun->script();
if (outer->isHeavyweightFunction) {
if (outer->function() && outer->function()->isHeavyweight()) {
outer->isOuterFunction = true;
inner->isInnerFunction = true;
}

Просмотреть файл

@ -1383,7 +1383,7 @@ frontend::PushBlockScope(TreeContext *tc, StmtInfo *stmt, ObjectBox *blockBox, p
PushStatement(tc, stmt, STMT_BLOCK, top);
stmt->flags |= SIF_SCOPE;
blockBox->parent = tc->blockChainBox;
blockBox->object->setParent(tc->blockChain());
blockBox->object->setStaticBlockScopeChain(tc->blockChain());
stmt->downScope = tc->topScopeStmt;
tc->topScopeStmt = stmt;
tc->blockChainBox = blockBox;
@ -1721,7 +1721,7 @@ frontend::LexicalLookup(TreeContext *tc, JSAtom *atom, jsint *slotp, StmtInfo *s
if (slotp) {
JS_ASSERT(obj->getSlot(JSSLOT_BLOCK_DEPTH).isInt32());
*slotp = obj->getSlot(JSSLOT_BLOCK_DEPTH).toInt32() + shape->shortid;
*slotp = obj->getSlot(JSSLOT_BLOCK_DEPTH).toInt32() + shape->shortid();
}
return stmt;
}
@ -1781,8 +1781,8 @@ LookupCompileTimeConstant(JSContext *cx, BytecodeEmitter *bce, JSAtom *atom, Val
* from our variable object here.
*/
if (!shape->writable() && !shape->configurable() &&
shape->hasDefaultGetter() && obj->containsSlot(shape->slot)) {
*constp = obj->getSlot(shape->slot);
shape->hasDefaultGetter() && obj->containsSlot(shape->slot())) {
*constp = obj->getSlot(shape->slot());
}
}
@ -2020,8 +2020,13 @@ EmitEnterBlock(JSContext *cx, ParseNode *pn, BytecodeEmitter *bce)
* js::Bindings::extensibleParents.
*/
if ((bce->flags & TCF_FUN_EXTENSIBLE_SCOPE) ||
bce->bindings.extensibleParents())
blockObj->setBlockOwnShape(cx);
bce->bindings.extensibleParents()) {
HeapPtrShape shape;
shape.init(blockObj->lastProperty());
if (!Shape::setExtensibleParents(cx, &shape))
return false;
blockObj->setLastPropertyInfallible(shape);
}
return true;
}
@ -2332,7 +2337,6 @@ BindNameToSlot(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
JS_ASSERT(bce->inFunction());
JS_ASSERT_IF(cookie.slot() != UpvarCookie::CALLEE_SLOT, bce->roLexdeps->lookup(atom));
JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
JS_ASSERT(bce->fun()->u.i.skipmin <= skip);
/*
* If op is a mutating opcode, this upvar's lookup skips too many levels,
@ -3844,13 +3848,6 @@ frontend::EmitFunctionScript(JSContext *cx, BytecodeEmitter *bce, ParseNode *bod
bce->switchToMain();
}
if (bce->flags & TCF_FUN_UNBRAND_THIS) {
bce->switchToProlog();
if (Emit1(cx, bce, JSOP_UNBRANDTHIS) < 0)
return false;
bce->switchToMain();
}
return EmitTree(cx, bce, body) &&
Emit1(cx, bce, JSOP_STOP) >= 0 &&
JSScript::NewScriptFromEmitter(cx, bce);
@ -4756,7 +4753,7 @@ ParseNode::getConstantValue(JSContext *cx, bool strictChecks, Value *vp)
case PNK_RC: {
JS_ASSERT(isOp(JSOP_NEWINIT) && !(pn_xflags & PNX_NONCONST));
gc::AllocKind kind = GuessObjectGCKind(pn_count, false);
gc::AllocKind kind = GuessObjectGCKind(pn_count);
JSObject *obj = NewBuiltinClassInstance(cx, &ObjectClass, kind);
if (!obj)
return false;
@ -5447,6 +5444,40 @@ EmitWith(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
return PopStatementBCE(cx, bce);
}
static bool
SetMethodFunction(JSContext *cx, FunctionBox *funbox, JSAtom *atom)
{
/*
* Replace a boxed function with a new one with a method atom. Methods
* require a function with the extended size finalize kind, which normal
* functions don't have. We don't eagerly allocate functions with the
* expanded size for boxed functions, as most functions are not methods.
*/
JSFunction *fun = js_NewFunction(cx, NULL, NULL,
funbox->function()->nargs,
funbox->function()->flags,
funbox->function()->getParent(),
funbox->function()->atom,
JSFunction::ExtendedFinalizeKind);
if (!fun)
return false;
JSScript *script = funbox->function()->script();
if (script) {
fun->setScript(script);
if (!script->typeSetFunction(cx, fun))
return false;
}
JS_ASSERT(funbox->function()->joinable());
fun->setJoinable();
fun->setMethodAtom(atom);
funbox->object = fun;
return true;
}
static bool
EmitForIn(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
{
@ -6329,6 +6360,8 @@ frontend::EmitTree(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
pn2->pn_left->isOp(JSOP_SETPROP) &&
pn2->pn_right->isOp(JSOP_LAMBDA) &&
pn2->pn_right->pn_funbox->joinable()) {
if (!SetMethodFunction(cx, pn2->pn_right->pn_funbox, pn2->pn_left->pn_atom))
return JS_FALSE;
pn2->pn_left->setOp(JSOP_SETMETHOD);
}
if (!EmitTree(cx, bce, pn2))
@ -7096,7 +7129,7 @@ frontend::EmitTree(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
*/
JSObject *obj = NULL;
if (!bce->hasSharps() && bce->compileAndGo()) {
gc::AllocKind kind = GuessObjectGCKind(pn->pn_count, false);
gc::AllocKind kind = GuessObjectGCKind(pn->pn_count);
obj = NewBuiltinClassInstance(cx, &ObjectClass, kind);
if (!obj)
return JS_FALSE;
@ -7144,6 +7177,8 @@ frontend::EmitTree(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
obj = NULL;
op = JSOP_INITMETHOD;
pn2->setOp(op);
if (!SetMethodFunction(cx, init->pn_funbox, pn3->pn_atom))
return JS_FALSE;
} else {
/*
* Disable NEWOBJECT on initializers that set __proto__, which has
@ -7171,11 +7206,6 @@ frontend::EmitTree(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
}
}
if (bce->funbox && bce->funbox->shouldUnbrand(methodInits, slowMethodInits)) {
obj = NULL;
if (Emit1(cx, bce, JSOP_UNBRAND) < 0)
return JS_FALSE;
}
if (!EmitEndInit(cx, bce, pn->pn_count))
return JS_FALSE;

Просмотреть файл

@ -203,15 +203,6 @@ struct StmtInfo {
/* bits 0x40000 and 0x80000 are unused */
/*
* Flag signifying that the current function seems to be a constructor that
* sets this.foo to define "methods", at least one of which can't be a null
* closure, so we should avoid over-specializing property cache entries and
* trace inlining guards to method function object identity, which will vary
* per instance.
*/
#define TCF_FUN_UNBRAND_THIS 0x100000
/*
* "Module pattern", i.e., a lambda that is immediately applied and the whole
* of an expression statement.

Просмотреть файл

@ -133,20 +133,6 @@ FunctionBox::scopeIsExtensible() const
return tcflags & TCF_FUN_EXTENSIBLE_SCOPE;
}
bool
FunctionBox::shouldUnbrand(uintN methods, uintN slowMethods) const
{
if (slowMethods != 0) {
for (const FunctionBox *funbox = this; funbox; funbox = funbox->parent) {
if (!(funbox->tcflags & TCF_FUN_MODULE_PATTERN))
return true;
if (funbox->inLoop)
return true;
}
}
return false;
}
/* Add |node| to |parser|'s free node list. */
void
ParseNodeAllocator::freeNode(ParseNode *pn)

Просмотреть файл

@ -1271,18 +1271,6 @@ struct FunctionBox : public ObjectBox
* ancestor?
*/
bool scopeIsExtensible() const;
/*
* Unbrand an object being initialized or constructed if any method cannot
* be joined to one compiler-created null closure shared among N different
* closure environments.
*
* We despecialize from caching function objects, caching slots or shapes
* instead, because an unbranded object may still have joined methods (for
* which shape->isMethod), since PropertyCache::fill gives precedence to
* joined methods over branded methods.
*/
bool shouldUnbrand(uintN methods, uintN slowMethods) const;
};
struct FunctionBoxQueue {

Просмотреть файл

@ -533,7 +533,7 @@ js::CheckStrictParameters(JSContext *cx, TreeContext *tc)
/* Start with lastVariable(), not lastArgument(), for destructuring. */
for (Shape::Range r = tc->bindings.lastVariable(); !r.empty(); r.popFront()) {
jsid id = r.front().propid;
jsid id = r.front().propid();
if (!JSID_IS_ATOM(id))
continue;
@ -932,8 +932,11 @@ Parser::newFunction(TreeContext *tc, JSAtom *atom, FunctionSyntaxKind kind)
JSFUN_INTERPRETED | (kind == Expression ? JSFUN_LAMBDA : 0),
parent, atom);
if (fun && !tc->compileAndGo()) {
fun->clearParent();
fun->clearType();
if (!fun->clearParent(context))
return NULL;
if (!fun->clearType(context))
return NULL;
fun->setEnvironment(NULL);
}
return fun;
}
@ -1169,7 +1172,7 @@ LeaveFunction(ParseNode *fn, TreeContext *funtc, PropertyName *funName = NULL,
* we create it eagerly whenever parameters are (or might, in the case of
* calls to eval) be assigned.
*/
if (funtc->inStrictMode() && funbox->object->getFunctionPrivate()->nargs > 0) {
if (funtc->inStrictMode() && funbox->object->toFunction()->nargs > 0) {
AtomDeclsIter iter(&funtc->decls);
Definition *dn;
@ -1951,7 +1954,7 @@ BindLet(JSContext *cx, BindData *data, JSAtom *atom, TreeContext *tc)
* BytecodeEmitter.cpp:EmitEnterBlock so they don't tie up unused space
* in the so-called "static" prototype Block.
*/
blockObj->setSlot(shape->slot, PrivateValue(pn));
blockObj->setSlot(shape->slot(), PrivateValue(pn));
return true;
}
@ -1965,7 +1968,7 @@ PopStatement(TreeContext *tc)
JS_ASSERT(!obj->isClonedBlock());
for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) {
JSAtom *atom = JSID_TO_ATOM(r.front().propid);
JSAtom *atom = JSID_TO_ATOM(r.front().propid());
/* Beware the empty destructuring dummy. */
if (atom == tc->parser->context->runtime->atomState.emptyAtom)
@ -1973,14 +1976,7 @@ PopStatement(TreeContext *tc)
tc->decls.remove(atom);
}
/*
* js_CloneBlockObject requires obj's shape to be frozen. Compare
* Bindings::makeImmutable.
*
* (This is a second pass over the shapes, if obj has a dictionary, but
* that is rare.)
*/
obj->lastProp->freezeIfDictionary();
JS_ASSERT(!obj->inDictionaryMode());
}
PopStatementTC(tc);
}
@ -2051,7 +2047,7 @@ DefineGlobal(ParseNode *pn, BytecodeEmitter *bce, PropertyName *name)
return true;
}
def = GlobalScope::GlobalDef(shape->slot);
def = GlobalScope::GlobalDef(shape->slot());
} else {
def = GlobalScope::GlobalDef(name, funbox);
}
@ -3687,7 +3683,7 @@ Parser::letStatement()
stmt->downScope = tc->topScopeStmt;
tc->topScopeStmt = stmt;
obj->setParent(tc->blockChain());
obj->setStaticBlockScopeChain(tc->blockChain());
blockbox->parent = tc->blockChainBox;
tc->blockChainBox = blockbox;
stmt->blockBox = blockbox;
@ -7156,8 +7152,10 @@ Parser::primaryExpr(TokenKind tt, JSBool afterDot)
return NULL;
if (!tc->compileAndGo()) {
reobj->clearParent();
reobj->clearType();
if (!reobj->clearParent(context))
return NULL;
if (!reobj->clearType(context))
return NULL;
}
pn->pn_objbox = tc->parser->newObjectBox(reobj);

Просмотреть файл

@ -164,7 +164,6 @@ FindFunArgs(FunctionBox *funbox, int level, FunctionBoxQueue *queue)
do {
ParseNode *fn = funbox->node;
JS_ASSERT(fn->isArity(PN_FUNC));
JSFunction *fun = funbox->function();
int fnlevel = level;
/*
@ -245,12 +244,10 @@ FindFunArgs(FunctionBox *funbox, int level, FunctionBoxQueue *queue)
/*
* Finally, after we've traversed all of the current function's kids,
* minimize fun's skipmin against our accumulated skipmin. Do likewise
* with allskipmin, but minimize across funbox and all of its siblings,
* to compute our return value.
* minimize allskipmin against our accumulated skipmin. Minimize across
* funbox and all of its siblings, to compute our return value.
*/
if (skipmin != UpvarCookie::FREE_LEVEL) {
fun->u.i.skipmin = skipmin;
if (skipmin < allskipmin)
allskipmin = skipmin;
}
@ -512,46 +509,6 @@ FlagHeavyweights(Definition *dn, FunctionBox *funbox, uint32 *tcflags)
*tcflags |= TCF_FUN_HEAVYWEIGHT;
}
static void
ConsiderUnbranding(FunctionBox *funbox)
{
/*
* We've already recursively set our kids' kinds, which also classifies
* enclosing functions holding upvars referenced in those descendants'
* bodies. So now we can check our "methods".
*
* Despecialize from branded method-identity-based shape to shape- or
* slot-based shape if this function smells like a constructor and too many
* of its methods are *not* joinable null closures (i.e., they have one or
* more upvars fetched via the display).
*/
bool returnsExpr = !!(funbox->tcflags & TCF_RETURN_EXPR);
#if JS_HAS_EXPR_CLOSURES
{
ParseNode *pn2 = funbox->node->pn_body;
if (pn2->isKind(PNK_UPVARS))
pn2 = pn2->pn_tree;
if (pn2->isKind(PNK_ARGSBODY))
pn2 = pn2->last();
if (!pn2->isKind(PNK_STATEMENTLIST))
returnsExpr = true;
}
#endif
if (!returnsExpr) {
uintN methodSets = 0, slowMethodSets = 0;
for (ParseNode *method = funbox->methods; method; method = method->pn_link) {
JS_ASSERT(method->isOp(JSOP_LAMBDA) || method->isOp(JSOP_LAMBDA_FC));
++methodSets;
if (!method->pn_funbox->joinable())
++slowMethodSets;
}
if (funbox->shouldUnbrand(methodSets, slowMethodSets))
funbox->tcflags |= TCF_FUN_UNBRAND_THIS;
}
}
static void
SetFunctionKinds(FunctionBox *funbox, uint32 *tcflags, bool isDirectEval)
{
@ -559,10 +516,8 @@ SetFunctionKinds(FunctionBox *funbox, uint32 *tcflags, bool isDirectEval)
ParseNode *fn = funbox->node;
ParseNode *pn = fn->pn_body;
if (funbox->kids) {
if (funbox->kids)
SetFunctionKinds(funbox->kids, tcflags, isDirectEval);
ConsiderUnbranding(funbox);
}
JSFunction *fun = funbox->function();
@ -674,8 +629,8 @@ SetFunctionKinds(FunctionBox *funbox, uint32 *tcflags, bool isDirectEval)
* must have their OWN_SHAPE flags set; the comments for
* js::Bindings::extensibleParents explain why.
*/
static void
MarkExtensibleScopeDescendants(FunctionBox *funbox, bool hasExtensibleParent)
static bool
MarkExtensibleScopeDescendants(JSContext *context, FunctionBox *funbox, bool hasExtensibleParent)
{
for (; funbox; funbox = funbox->siblings) {
/*
@ -685,14 +640,20 @@ MarkExtensibleScopeDescendants(FunctionBox *funbox, bool hasExtensibleParent)
*/
JS_ASSERT(!funbox->bindings.extensibleParents());
if (hasExtensibleParent)
funbox->bindings.setExtensibleParents();
if (hasExtensibleParent) {
if (!funbox->bindings.setExtensibleParents(context))
return false;
}
if (funbox->kids) {
MarkExtensibleScopeDescendants(funbox->kids,
hasExtensibleParent || funbox->scopeIsExtensible());
if (!MarkExtensibleScopeDescendants(context, funbox->kids,
hasExtensibleParent || funbox->scopeIsExtensible())) {
return false;
}
}
}
return true;
}
bool
@ -703,7 +664,8 @@ frontend::AnalyzeFunctions(TreeContext *tc)
return true;
if (!MarkFunArgs(tc->parser->context, tc->functionList, tc->parser->functionCount))
return false;
MarkExtensibleScopeDescendants(tc->functionList, false);
if (!MarkExtensibleScopeDescendants(tc->parser->context, tc->functionList, false))
return false;
bool isDirectEval = !!tc->parser->callerFrame;
SetFunctionKinds(tc->functionList, &tc->flags, isDirectEval);
return true;

Просмотреть файл

@ -271,7 +271,8 @@ typedef HeapPtr<JSFunction> HeapPtrFunction;
typedef HeapPtr<JSString> HeapPtrString;
typedef HeapPtr<JSScript> HeapPtrScript;
typedef HeapPtr<Shape> HeapPtrShape;
typedef HeapPtr<const Shape> HeapPtrConstShape;
typedef HeapPtr<BaseShape> HeapPtrBaseShape;
typedef HeapPtr<types::TypeObject> HeapPtrTypeObject;
typedef HeapPtr<JSXML> HeapPtrXML;
/* Useful for hashtables with a HeapPtr as key. */

Просмотреть файл

@ -192,7 +192,6 @@ Statistics::beginGC(JSCompartment *comp, Reason reason)
Probes::GCStart(compartment);
GCCrashData crashData;
crashData.isRegen = runtime->shapeGen & SHAPE_OVERFLOW_BIT;
crashData.isCompartment = !!compartment;
crash::SaveCrashData(crash::JS_CRASH_TAG_GC, &crashData, sizeof(crashData));
}
@ -277,8 +276,7 @@ Statistics::endGC()
if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback) {
(*cb)(JS_TELEMETRY_GC_REASON, triggerReason);
(*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, compartment ? 1 : 0);
(*cb)(JS_TELEMETRY_GC_IS_SHAPE_REGEN,
runtime->shapeGen & SHAPE_OVERFLOW_BIT ? 1 : 0);
(*cb)(JS_TELEMETRY_GC_IS_SHAPE_REGEN, 0);
(*cb)(JS_TELEMETRY_GC_MS, t(PHASE_GC));
(*cb)(JS_TELEMETRY_GC_MARK_MS, t(PHASE_MARK));
(*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(PHASE_SWEEP));

Просмотреть файл

@ -0,0 +1,4 @@
var o4 = Object.freeze({
set: function(summary) {}
});

Просмотреть файл

@ -0,0 +1,7 @@
a = "".__proto__
b = uneval().__proto__
for (var i = 0; i < 2; i++) {
a.__defineSetter__("valueOf", function() {})
a + ""
delete b.valueOf
}

Просмотреть файл

@ -0,0 +1,4 @@
for (let j = 0; j < (20); ++(__lookupSetter__)) {
function g() { j; }
j++;
}

Просмотреть файл

@ -0,0 +1,13 @@
Function.prototype.__proto__["p"] = 3
c = [].__proto__
c[5] = 3
Namespace.prototype.__proto__[4] = function() {}
gc()
Function("\
{\
function f(d) {}\
for each(let z in[0]) {\
f(z)\
}\
}\
")()

Просмотреть файл

@ -0,0 +1 @@
for (let x in [<y/>.(let(x) function() {})]) {}

Просмотреть файл

@ -0,0 +1,3 @@
Object.defineProperty(Namespace.prototype, "toString", {
enumerable: true
})

Просмотреть файл

@ -0,0 +1,12 @@
function f(s) {
eval(s);
return function() {
with({}) {};
return b;
};
}
var b = 1;
var g1 = f("");
var g2 = f("var b = 2;");
g1('');
assertEq(g2(''), 2);

Просмотреть файл

@ -0,0 +1,11 @@
c = (0).__proto__
function f(o) {
o.__proto__ = null
for (x in o) {}
}
for (i = 0; i < 9; i++) {
f(c)
Function.prototype.__proto__.__proto__ = c
for (x in Function.prototype.__proto__) {}
f(Math.__proto__)
}

Просмотреть файл

@ -0,0 +1,13 @@
// |jit-test| error: TypeError
function f(o) {
for (j = 0; j < 9; j++) {
if (j) {
o.__proto__ = null
}
for (v in o) {}
}
}
for (i = 0; i < 9; i++) {
(new Boolean).__proto__.__defineGetter__("toString", function() {})
f(Boolean.prototype)
}

Просмотреть файл

@ -3,7 +3,7 @@ setDebug(true);
x = "notset";
function main() {
/* The JSOP_STOP in main. */
a = { valueOf: function () { trap(main, 58, "success()"); } };
a = { valueOf: function () { trap(main, 57, "success()"); } };
b = "";
eval();
a + b;

Просмотреть файл

@ -0,0 +1,17 @@
function TestCase(n, d, e, a)
this.name=n;
function reportCompare (expected, actual, description) {
new TestCase
}
reportCompare(true, "isGenerator" in Function, "Function.prototype.isGenerator present");
var p = Proxy.create({
has : function(id) {}
});
function test() {
Object.prototype.__proto__=null
if (new TestCase)
Object.prototype.__proto__=p
}
test();
new TestCase;
test()

Просмотреть файл

@ -0,0 +1,7 @@
a = []
function f(o) {
o[5] = {}
}
for (var i = 0; i < 20; i++) {
with(a) f(a)
}

Просмотреть файл

@ -174,7 +174,7 @@ ScriptAnalysis::analyzeBytecode(JSContext *cx)
LifoAlloc &tla = cx->typeLifoAlloc();
unsigned length = script->length;
unsigned nargs = script->hasFunction ? script->function()->nargs : 0;
unsigned nargs = script->function() ? script->function()->nargs : 0;
numSlots = TotalSlots(script);
@ -226,15 +226,16 @@ ScriptAnalysis::analyzeBytecode(JSContext *cx)
if (cx->compartment->debugMode())
usesReturnValue_ = true;
bool heavyweight = script->function() && script->function()->isHeavyweight();
isInlineable = true;
if (script->nClosedArgs || script->nClosedVars ||
(script->hasFunction && script->function()->isHeavyweight()) ||
if (script->nClosedArgs || script->nClosedVars || heavyweight ||
script->usesEval || script->usesArguments || cx->compartment->debugMode()) {
isInlineable = false;
}
modifiesArguments_ = false;
if (script->nClosedArgs || (script->hasFunction && script->function()->isHeavyweight()))
if (script->nClosedArgs || heavyweight)
modifiesArguments_ = true;
canTrackVars = true;

Просмотреть файл

@ -394,7 +394,7 @@ static inline uint32 ArgSlot(uint32 arg) {
return 2 + arg;
}
static inline uint32 LocalSlot(JSScript *script, uint32 local) {
return 2 + (script->hasFunction ? script->function()->nargs : 0) + local;
return 2 + (script->function() ? script->function()->nargs : 0) + local;
}
static inline uint32 TotalSlots(JSScript *script) {
return LocalSlot(script, 0) + script->nfixed;

Просмотреть файл

@ -6,8 +6,6 @@
#include "vm/Stack-inl.h"
#include "jsobjinlines.h"
using namespace js;
static const char NORMAL_ZERO[] =

Просмотреть файл

@ -8,8 +8,6 @@
#include "jsobj.h"
#include "jswrapper.h"
#include "jsobjinlines.h"
struct OuterWrapper : js::Wrapper
{
OuterWrapper() : Wrapper(0) {}

Просмотреть файл

@ -50,13 +50,8 @@ BEGIN_TEST(testConservativeGC)
bool checkObjectFields(JSObject *savedCopy, JSObject *obj)
{
/* Ignore fields which are unstable across GCs. */
CHECK(savedCopy->lastProp == obj->lastProp);
CHECK(savedCopy->getClass() == obj->getClass());
CHECK(savedCopy->flags == obj->flags);
CHECK(savedCopy->newType == obj->newType);
CHECK(savedCopy->lastProperty() == obj->lastProperty());
CHECK(savedCopy->getProto() == obj->getProto());
CHECK(savedCopy->parent == obj->parent);
CHECK(savedCopy->privateData == obj->privateData);
return true;
}

Просмотреть файл

@ -9,8 +9,6 @@
#include "jsnum.h"
#include "jsstr.h"
#include "jsobjinlines.h"
#include "vm/String-inl.h"
using namespace mozilla;

Просмотреть файл

@ -5,6 +5,8 @@
#include "tests.h"
#include "jsfun.h" // for js::IsInternalFunctionObject
#include "jsobjinlines.h"
BEGIN_TEST(testLookup_bug522590)
{
// Define a function that makes method-bearing objects.
@ -26,7 +28,7 @@ BEGIN_TEST(testLookup_bug522590)
JSObject *funobj = JSVAL_TO_OBJECT(r);
CHECK(funobj->isFunction());
CHECK(!js::IsInternalFunctionObject(funobj));
CHECK(funobj->getFunctionPrivate() != (JSFunction *) funobj);
CHECK(funobj->toFunction()->isClonedMethod());
return true;
}

Просмотреть файл

@ -3,6 +3,7 @@
#include "jscntxt.h"
#include "jscntxtinlines.h"
#include "jsobjinlines.h"
using namespace js;

Просмотреть файл

@ -336,11 +336,11 @@ JS_ConvertArgumentsVA(JSContext *cx, uintN argc, jsval *argv, const char *format
*va_arg(ap, JSObject **) = obj;
break;
case 'f':
obj = js_ValueToFunctionObject(cx, sp, 0);
obj = js_ValueToFunction(cx, sp, 0);
if (!obj)
return JS_FALSE;
*sp = OBJECT_TO_JSVAL(obj);
*va_arg(ap, JSFunction **) = obj->getFunctionPrivate();
*va_arg(ap, JSFunction **) = obj->toFunction();
break;
case 'v':
*va_arg(ap, jsval *) = *sp;
@ -429,7 +429,7 @@ JS_ConvertValue(JSContext *cx, jsval v, JSType type, jsval *vp)
break;
case JSTYPE_FUNCTION:
*vp = v;
obj = js_ValueToFunctionObject(cx, vp, JSV2F_SEARCH_STACK);
obj = js_ValueToFunction(cx, vp, JSV2F_SEARCH_STACK);
ok = (obj != NULL);
break;
case JSTYPE_STRING:
@ -645,7 +645,6 @@ JSRuntime::JSRuntime()
compartmentCallback(NULL),
activityCallback(NULL),
activityCallbackArg(NULL),
protoHazardShape(0),
gcSystemAvailableChunkListHead(NULL),
gcUserAvailableChunkListHead(NULL),
gcKeepAtoms(0),
@ -672,7 +671,6 @@ JSRuntime::JSRuntime()
gcPoke(false),
gcMarkAndSweep(false),
gcRunning(false),
gcRegenShapes(false),
#ifdef JS_GC_ZEAL
gcZeal_(0),
gcZealFrequency(0),
@ -726,7 +724,6 @@ JSRuntime::JSRuntime()
threadData(thisFromCtor()),
#endif
trustedPrincipals_(NULL),
shapeGen(0),
wrapObjectCallback(NULL),
preWrapObjectCallback(NULL),
inOOMReport(0)
@ -2383,6 +2380,10 @@ JS_PrintTraceThingInfo(char *buf, size_t bufsize, JSTracer *trc, void *thing,
name = "shape";
break;
case JSTRACE_BASE_SHAPE:
name = "base_shape";
break;
case JSTRACE_TYPE_OBJECT:
name = "type_object";
break;
@ -2411,7 +2412,7 @@ JS_PrintTraceThingInfo(char *buf, size_t bufsize, JSTracer *trc, void *thing,
JSObject *obj = (JSObject *)thing;
Class *clasp = obj->getClass();
if (clasp == &FunctionClass) {
JSFunction *fun = obj->getFunctionPrivate();
JSFunction *fun = obj->toFunction();
if (!fun) {
JS_snprintf(buf, bufsize, "<newborn>");
} else if (fun != obj) {
@ -2446,6 +2447,7 @@ JS_PrintTraceThingInfo(char *buf, size_t bufsize, JSTracer *trc, void *thing,
}
case JSTRACE_SHAPE:
case JSTRACE_BASE_SHAPE:
case JSTRACE_TYPE_OBJECT:
break;
@ -3080,14 +3082,9 @@ JS_GetInstancePrivate(JSContext *cx, JSObject *obj, JSClass *clasp, jsval *argv)
JS_PUBLIC_API(JSObject *)
JS_GetPrototype(JSContext *cx, JSObject *obj)
{
JSObject *proto;
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj);
proto = obj->getProto();
/* Beware ref to dead object (we may be called from obj's finalizer). */
return proto && !proto->isNewborn() ? proto : NULL;
return obj->getProto();
}
JS_PUBLIC_API(JSBool)
@ -3101,21 +3098,19 @@ JS_SetPrototype(JSContext *cx, JSObject *obj, JSObject *proto)
JS_PUBLIC_API(JSObject *)
JS_GetParent(JSContext *cx, JSObject *obj)
{
JS_ASSERT(!obj->isInternalScope());
assertSameCompartment(cx, obj);
JSObject *parent = obj->getParent();
/* Beware ref to dead object (we may be called from obj's finalizer). */
return parent && !parent->isNewborn() ? parent : NULL;
return obj->getParent();
}
JS_PUBLIC_API(JSBool)
JS_SetParent(JSContext *cx, JSObject *obj, JSObject *parent)
{
CHECK_REQUEST(cx);
JS_ASSERT(!obj->isInternalScope());
JS_ASSERT(parent || !obj->getParent());
assertSameCompartment(cx, obj, parent);
obj->setParent(parent);
return true;
return obj->setParent(cx, parent);
}
JS_PUBLIC_API(JSObject *)
@ -3131,8 +3126,7 @@ JS_GetConstructor(JSContext *cx, JSObject *proto)
if (!proto->getProperty(cx, cx->runtime->atomState.constructorAtom, &cval))
return NULL;
}
JSObject *funobj;
if (!IsFunctionObject(cval, &funobj)) {
if (!IsFunctionObject(cval)) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NO_CONSTRUCTOR,
proto->getClass()->name);
return NULL;
@ -3206,14 +3200,13 @@ JS_NewObject(JSContext *cx, JSClass *jsclasp, JSObject *proto, JSObject *parent)
JS_ASSERT(clasp != &FunctionClass);
JS_ASSERT(!(clasp->flags & JSCLASS_IS_GLOBAL));
if (proto)
proto->getNewType(cx, NULL, /* markUnknown = */ true);
if (proto && !proto->setNewTypeUnknown(cx))
return NULL;
JSObject *obj = NewNonFunction<WithProto::Class>(cx, clasp, proto, parent);
JSObject *obj = NewObjectWithClassProto(cx, clasp, proto, parent);
if (obj) {
if (clasp->ext.equality)
MarkTypeObjectFlags(cx, obj, OBJECT_FLAG_SPECIAL_EQUALITY);
obj->syncSpecialEquality();
MarkTypeObjectUnknownProperties(cx, obj->type());
}
@ -3235,11 +3228,9 @@ JS_NewObjectWithGivenProto(JSContext *cx, JSClass *jsclasp, JSObject *proto, JSO
JS_ASSERT(clasp != &FunctionClass);
JS_ASSERT(!(clasp->flags & JSCLASS_IS_GLOBAL));
JSObject *obj = NewNonFunction<WithProto::Given>(cx, clasp, proto, parent);
if (obj) {
obj->syncSpecialEquality();
JSObject *obj = NewObjectWithGivenProto(cx, clasp, proto, parent);
if (obj)
MarkTypeObjectUnknownProperties(cx, obj->type());
}
return obj;
}
@ -3349,13 +3340,13 @@ LookupResult(JSContext *cx, JSObject *obj, JSObject *obj2, jsid id,
Shape *shape = (Shape *) prop;
if (shape->isMethod()) {
vp->setObject(shape->methodObject());
vp->setObject(*obj2->nativeGetMethod(shape));
return !!obj2->methodReadBarrier(cx, *shape, vp);
}
/* Peek at the native property's slot value, without doing a Get. */
if (obj2->containsSlot(shape->slot)) {
*vp = obj2->nativeGetSlot(shape->slot);
if (shape->hasSlot()) {
*vp = obj2->nativeGetSlot(shape->slot());
return true;
}
} else {
@ -3654,12 +3645,10 @@ JS_DefineObject(JSContext *cx, JSObject *obj, const char *name, JSClass *jsclasp
if (!clasp)
clasp = &ObjectClass; /* default class is Object */
JSObject *nobj = NewObject<WithProto::Class>(cx, clasp, proto, obj);
JSObject *nobj = NewObjectWithClassProto(cx, clasp, proto, obj);
if (!nobj)
return NULL;
nobj->syncSpecialEquality();
if (!DefineProperty(cx, obj, name, ObjectValue(*nobj), NULL, NULL, attrs, 0, 0))
return NULL;
@ -3726,12 +3715,12 @@ GetPropertyDescriptorById(JSContext *cx, JSObject *obj, jsid id, uintN flags,
if (shape->isMethod()) {
desc->getter = JS_PropertyStub;
desc->setter = JS_StrictPropertyStub;
desc->value.setObject(shape->methodObject());
desc->value.setObject(*obj2->nativeGetMethod(shape));
} else {
desc->getter = shape->getter();
desc->setter = shape->setter();
if (obj2->containsSlot(shape->slot))
desc->value = obj2->nativeGetSlot(shape->slot);
if (shape->hasSlot())
desc->value = obj2->nativeGetSlot(shape->slot());
else
desc->value.setUndefined();
}
@ -4143,7 +4132,7 @@ JS_NewPropertyIterator(JSContext *cx, JSObject *obj)
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj);
iterobj = NewNonFunction<WithProto::Class>(cx, &prop_iter_class, NULL, obj);
iterobj = NewObjectWithClassProto(cx, &prop_iter_class, NULL, obj);
if (!iterobj)
return NULL;
@ -4191,11 +4180,11 @@ JS_NextProperty(JSContext *cx, JSObject *iterobj, jsid *idp)
shape = shape->previous();
if (!shape->previous()) {
JS_ASSERT(JSID_IS_EMPTY(shape->propid));
JS_ASSERT(shape->isEmptyShape());
*idp = JSID_VOID;
} else {
iterobj->setPrivate(const_cast<Shape *>(shape->previous()));
*idp = shape->propid;
*idp = shape->propid();
}
} else {
/* Non-native case: use the ida enumerated when iterobj was created. */
@ -4382,9 +4371,9 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent)
return NULL;
}
JSFunction *fun = funobj->getFunctionPrivate();
JSFunction *fun = funobj->toFunction();
if (!fun->isInterpreted())
return CloneFunctionObject(cx, fun, parent);
return CloneFunctionObject(cx, fun, parent, fun->getAllocKind());
if (fun->script()->compileAndGo) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
@ -4393,7 +4382,7 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent)
}
if (!fun->isFlatClosure())
return CloneFunctionObject(cx, fun, parent);
return CloneFunctionObject(cx, fun, parent, fun->getAllocKind());
/*
* A flat closure carries its own environment, so why clone it? In case
@ -4424,13 +4413,13 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent)
JSMSG_BAD_CLONE_FUNOBJ_SCOPE);
return NULL;
}
obj = obj->getParent();
obj = obj->scopeChain();
}
Value v;
if (!obj->getGeneric(cx, r.front().propid, &v))
if (!obj->getGeneric(cx, r.front().propid(), &v))
return NULL;
clone->setFlatClosureUpvar(i, v);
clone->toFunction()->setFlatClosureUpvar(i, v);
}
return clone;
@ -4477,14 +4466,15 @@ JS_IsNativeFunction(JSObject *funobj, JSNative call)
{
if (!funobj->isFunction())
return false;
JSFunction *fun = funobj->getFunctionPrivate();
JSFunction *fun = funobj->toFunction();
return fun->isNative() && fun->native() == call;
}
JSBool
js_generic_native_method_dispatcher(JSContext *cx, uintN argc, Value *vp)
{
JSFunctionSpec *fs = (JSFunctionSpec *) vp->toObject().getReservedSlot(0).toPrivate();
JSFunctionSpec *fs = (JSFunctionSpec *)
vp->toObject().toFunction()->getExtendedSlot(0).toPrivate();
JS_ASSERT((fs->flags & JSFUN_GENERIC_NATIVE) != 0);
if (argc < 1) {
@ -4539,7 +4529,8 @@ JS_DefineFunctions(JSContext *cx, JSObject *obj, JSFunctionSpec *fs)
fun = js_DefineFunction(cx, ctor, ATOM_TO_JSID(atom),
js_generic_native_method_dispatcher,
fs->nargs + 1,
flags);
flags,
JSFunction::ExtendedFinalizeKind);
if (!fun)
return JS_FALSE;
@ -4547,9 +4538,7 @@ JS_DefineFunctions(JSContext *cx, JSObject *obj, JSFunctionSpec *fs)
* As jsapi.h notes, fs must point to storage that lives as long
* as fun->object lives.
*/
Value priv = PrivateValue(fs);
if (!js_SetReservedSlot(cx, fun, 0, priv))
return JS_FALSE;
fun->setExtendedSlot(0, PrivateValue(fs));
}
fun = js_DefineFunction(cx, obj, ATOM_TO_JSID(atom), fs->call, fs->nargs, flags);

Просмотреть файл

@ -3154,8 +3154,7 @@ struct JSClass {
#define JSCLASS_NEW_ENUMERATE (1<<1) /* has JSNewEnumerateOp hook */
#define JSCLASS_NEW_RESOLVE (1<<2) /* has JSNewResolveOp hook */
#define JSCLASS_PRIVATE_IS_NSISUPPORTS (1<<3) /* private is (nsISupports *) */
#define JSCLASS_CONCURRENT_FINALIZER (1<<4) /* finalize is called on background thread */
#define JSCLASS_NEW_RESOLVE_GETS_START (1<<5) /* JSNewResolveOp gets starting
#define JSCLASS_NEW_RESOLVE_GETS_START (1<<4) /* JSNewResolveOp gets starting
object in prototype chain
passed in via *objp in/out
parameter */

Просмотреть файл

@ -281,10 +281,10 @@ JSObject::willBeSparseDenseArray(uintN requiredCapacity, uintN newElementsHint)
JS_ASSERT(isDenseArray());
JS_ASSERT(requiredCapacity > MIN_SPARSE_INDEX);
uintN cap = numSlots();
uintN cap = getDenseArrayCapacity();
JS_ASSERT(requiredCapacity >= cap);
if (requiredCapacity >= JSObject::NSLOTS_LIMIT)
if (requiredCapacity >= JSObject::NELEMENTS_LIMIT)
return true;
uintN minimalDenseCount = requiredCapacity / 4;
@ -352,7 +352,7 @@ JSObject::arrayGetOwnDataElement(JSContext *cx, size_t i, Value *vp)
if (!shape || !shape->isDataDescriptor())
vp->setMagic(JS_ARRAY_HOLE);
else
*vp = getSlot(shape->slot);
*vp = getSlot(shape->slot());
return true;
}
@ -634,9 +634,7 @@ array_length_setter(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value
if (oldinit > newlen)
obj->setDenseArrayInitializedLength(newlen);
if (oldcap > newlen)
obj->shrinkDenseArrayElements(cx, newlen);
if (oldinit > newlen && !cx->typeInferenceEnabled())
obj->backfillDenseArrayHoles(cx);
obj->shrinkElements(cx, newlen);
} else if (oldlen - newlen < (1 << 24)) {
do {
--oldlen;
@ -1219,7 +1217,7 @@ array_fix(JSContext *cx, JSObject *obj, bool *success, AutoIdVector *props)
Class js::ArrayClass = {
"Array",
Class::NON_NATIVE | JSCLASS_HAS_PRIVATE | JSCLASS_HAS_CACHED_PROTO(JSProto_Array),
Class::NON_NATIVE | JSCLASS_HAS_CACHED_PROTO(JSProto_Array),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
JS_PropertyStub, /* getProperty */
@ -1277,7 +1275,6 @@ Class js::ArrayClass = {
Class js::SlowArrayClass = {
"Array",
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_CACHED_PROTO(JSProto_Array),
slowarray_addProperty,
JS_PropertyStub, /* delProperty */
@ -1288,12 +1285,36 @@ Class js::SlowArrayClass = {
JS_ConvertStub
};
bool
JSObject::allocateSlowArrayElements(JSContext *cx)
{
JS_ASSERT(hasClass(&js::SlowArrayClass));
JS_ASSERT(elements == emptyObjectElements);
ObjectElements *header = cx->new_<ObjectElements>(0, 0);
if (!header)
return false;
elements = header->elements();
return true;
}
static bool
AddLengthProperty(JSContext *cx, JSObject *obj)
{
/*
* Add the 'length' property for a newly created or converted slow array,
* and update the elements to be an empty array owned by the object.
* The shared emptyObjectElements singleton cannot be used for slow arrays,
* as accesses to 'length' will use the elements header.
*/
const jsid lengthId = ATOM_TO_JSID(cx->runtime->atomState.lengthAtom);
JS_ASSERT(!obj->nativeLookup(cx, lengthId));
if (!obj->allocateSlowArrayElements(cx))
return false;
return obj->addProperty(cx, lengthId, array_length_getter, array_length_setter,
SHAPE_INVALID_SLOT, JSPROP_PERMANENT | JSPROP_SHARED, 0, 0);
}
@ -1309,62 +1330,50 @@ JSObject::makeDenseArraySlow(JSContext *cx)
MarkTypeObjectFlags(cx, this,
OBJECT_FLAG_NON_PACKED_ARRAY |
OBJECT_FLAG_NON_DENSE_ARRAY);
markDenseArrayNotPacked(cx);
uint32 arrayCapacity = getDenseArrayCapacity();
uint32 arrayInitialized = getDenseArrayInitializedLength();
/*
* Get an allocated array of the existing elements, evicting from the fixed
* slots if necessary.
*/
if (!hasDynamicElements()) {
if (!growElements(cx, arrayCapacity))
return false;
JS_ASSERT(hasDynamicElements());
}
/*
* Save old map now, before calling InitScopeForObject. We'll have to undo
* on error. This is gross, but a better way is not obvious. Note: the
* exact contents of the array are not preserved on error.
*/
js::Shape *oldMap = lastProp;
js::Shape *oldShape = lastProperty();
/* Create a native scope. */
gc::AllocKind kind = getAllocKind();
js::EmptyShape *empty = InitScopeForObject(cx, this, &SlowArrayClass,
getProto()->getNewType(cx), kind);
if (!empty)
Shape *shape = EmptyShape::getInitialShape(cx, &SlowArrayClass, getProto(),
oldShape->getObjectParent(), kind);
if (!shape)
return false;
setMap(empty);
this->shape_ = shape;
backfillDenseArrayHoles(cx);
/* Take ownership of the dense elements, reset to an empty dense array. */
HeapValue *elems = elements;
elements = emptyObjectElements;
uint32 arrayCapacity = getDenseArrayCapacity();
uint32 arrayInitialized = getDenseArrayInitializedLength();
/*
* Adjust the slots to account for the different layout between dense
* arrays and other objects. The slots must be dynamic, and the fixed slots
* are now available for newly added properties.
*/
if (denseArrayHasInlineSlots()) {
if (!allocSlots(cx, numSlots())) {
setMap(oldMap);
return false;
}
JS_ASSERT(!denseArrayHasInlineSlots());
}
capacity = numFixedSlots() + arrayCapacity;
clasp = &SlowArrayClass;
/*
* Root all values in the array during conversion, as SlowArrayClass only
* protects up to its slot span.
*/
AutoValueArray autoArray(cx, Valueify(slots), arrayInitialized);
/* The initialized length is used iff this is a dense array. */
initializedLength() = 0;
JS_ASSERT(newType == NULL);
/* Root all values in the array during conversion. */
AutoValueArray autoArray(cx, (Value *) elems, arrayInitialized);
/*
* Begin with the length property to share more of the property tree.
* The getter/setter here will directly access the object's private value.
*/
if (!AddLengthProperty(cx, this)) {
setMap(oldMap);
capacity = arrayCapacity;
initializedLength() = arrayInitialized;
clasp = &ArrayClass;
this->shape_ = oldShape;
cx->free_(getElementsHeader());
elements = elems;
return false;
}
@ -1373,37 +1382,30 @@ JSObject::makeDenseArraySlow(JSContext *cx)
* remove holes, so that shapes use successive slots (as for other objects).
*/
uint32 next = 0;
for (uint32 i = 0; i < arrayCapacity; i++) {
for (uint32 i = 0; i < arrayInitialized; i++) {
/* Dense array indexes can always fit in a jsid. */
jsid id;
JS_ALWAYS_TRUE(ValueToId(cx, Int32Value(i), &id));
if (slots[i].isMagic(JS_ARRAY_HOLE))
if (elems[i].isMagic(JS_ARRAY_HOLE))
continue;
/*
* No barrier is needed here because the set of reachable objects before
* and after slowification is the same. During slowification, the
* autoArray rooter guarantees that all slots will be marked.
*
* It's important that we avoid a barrier here because the fixed slots
* of a dense array can be garbage; a write barrier after the switch to
* a slow array could cause a crash.
*/
initSlotUnchecked(next, slots[i]);
if (!addDataProperty(cx, id, next, JSPROP_ENUMERATE)) {
setMap(oldMap);
capacity = arrayCapacity;
initializedLength() = arrayInitialized;
clasp = &ArrayClass;
this->shape_ = oldShape;
cx->free_(getElementsHeader());
elements = elems;
return false;
}
initSlot(next, elems[i]);
next++;
}
clearSlotRange(next, capacity - next);
ObjectElements *oldheader = ObjectElements::fromElements(elems);
getElementsHeader()->length = oldheader->length;
cx->free_(oldheader);
return true;
}
@ -1838,13 +1840,10 @@ InitArrayObject(JSContext *cx, JSObject *obj, jsuint length, const Value *vector
return false;
/* Avoid ensureDenseArrayElements to skip sparse array checks there. */
if (!obj->ensureSlots(cx, length))
if (!obj->ensureElements(cx, length))
return false;
if (cx->typeInferenceEnabled())
obj->setDenseArrayInitializedLength(length);
else
obj->backfillDenseArrayHoles(cx);
obj->setDenseArrayInitializedLength(length);
bool hole = false;
for (jsuint i = 0; i < length; i++) {
@ -2301,11 +2300,10 @@ NewbornArrayPushImpl(JSContext *cx, JSObject *obj, const Value &v)
JS_ASSERT(obj->isDenseArray());
JS_ASSERT(length <= obj->getDenseArrayCapacity());
if (length == obj->getDenseArrayCapacity() && !obj->ensureSlots(cx, length + 1))
if (!obj->ensureElements(cx, length + 1))
return false;
if (cx->typeInferenceEnabled())
obj->setDenseArrayInitializedLength(length + 1);
obj->setDenseArrayInitializedLength(length + 1);
obj->setDenseArrayLength(length + 1);
obj->initDenseArrayElementWithType(cx, length, v);
return true;
@ -2376,7 +2374,7 @@ array_pop_dense(JSContext *cx, JSObject* obj, CallArgs &args)
if (!hole && DeleteArrayElement(cx, obj, index, true) < 0)
return JS_FALSE;
if (cx->typeInferenceEnabled() && obj->getDenseArrayInitializedLength() > index)
if (obj->getDenseArrayInitializedLength() > index)
obj->setDenseArrayInitializedLength(index);
obj->setArrayLength(cx, index);
@ -2438,10 +2436,7 @@ js::array_shift(JSContext *cx, uintN argc, Value *vp)
if (args.rval().isMagic(JS_ARRAY_HOLE))
args.rval().setUndefined();
obj->moveDenseArrayElements(0, 1, length);
if (cx->typeInferenceEnabled())
obj->setDenseArrayInitializedLength(obj->getDenseArrayInitializedLength() - 1);
else
obj->setDenseArrayElement(length, MagicValue(JS_ARRAY_HOLE));
obj->setDenseArrayInitializedLength(obj->getDenseArrayInitializedLength() - 1);
obj->setArrayLength(cx, length);
if (!js_SuppressDeletedProperty(cx, obj, INT_TO_JSID(length)))
return JS_FALSE;
@ -2543,7 +2538,7 @@ TryReuseArrayType(JSObject *obj, JSObject *nobj)
* and has the same prototype.
*/
JS_ASSERT(nobj->isDenseArray());
JS_ASSERT(nobj->type() == nobj->getProto()->newType);
JS_ASSERT(nobj->getProto()->hasNewType(nobj->type()));
if (obj->isArray() && !obj->hasSingletonType() && obj->getProto() == nobj->getProto())
nobj->setType(obj->type());
@ -2693,7 +2688,7 @@ array_splice(JSContext *cx, uintN argc, Value *vp)
obj->setDenseArrayInitializedLength(finalLength);
/* Steps 12(c)-(d). */
obj->shrinkDenseArrayElements(cx, finalLength);
obj->shrinkElements(cx, finalLength);
/* Fix running enumerators for the deleted items. */
if (!js_SuppressDeletedElements(cx, obj, finalLength, len))
@ -2808,7 +2803,7 @@ mjit::stubs::ArrayConcatTwoArrays(VMFrame &f)
/* No overflow here due to nslots limit. */
uint32 len = initlen1 + initlen2;
if (!result->ensureSlots(f.cx, len))
if (!result->ensureElements(f.cx, len))
THROW();
JS_ASSERT(!result->getDenseArrayInitializedLength());
@ -2846,8 +2841,6 @@ js::array_concat(JSContext *cx, uintN argc, Value *vp)
return JS_FALSE;
TryReuseArrayType(aobj, nobj);
nobj->setArrayLength(cx, length);
if (!aobj->isPackedDenseArray())
nobj->markDenseArrayNotPacked(cx);
vp->setObject(*nobj);
if (argc == 0)
return JS_TRUE;
@ -2952,8 +2945,6 @@ array_slice(JSContext *cx, uintN argc, Value *vp)
if (!nobj)
return JS_FALSE;
TryReuseArrayType(obj, nobj);
if (!obj->isPackedDenseArray())
nobj->markDenseArrayNotPacked(cx);
args.rval().setObject(*nobj);
return JS_TRUE;
}
@ -3609,8 +3600,14 @@ js_InitArrayClass(JSContext *cx, JSObject *obj)
if (!ctor)
return NULL;
/* The default 'new' object for Array.prototype has unknown properties. */
arrayProto->getNewType(cx, NULL, /* markUnknown = */ true);
/*
* The default 'new' type of Array.prototype is required by type inference
* to have unknown properties, to simplify handling of e.g. heterogenous
* arrays in JSON and script literals and allows setDenseArrayElement to
* be used without updating the indexed type set for such default arrays.
*/
if (!arrayProto->setNewTypeUnknown(cx))
return NULL;
if (!LinkConstructorAndPrototype(cx, ctor, arrayProto))
return NULL;
@ -3632,34 +3629,78 @@ js_InitArrayClass(JSContext *cx, JSObject *obj)
*/
namespace js {
static inline bool
EnsureNewArrayElements(JSContext *cx, JSObject *obj, jsuint length)
{
/*
* If ensureElements creates dynamically allocated slots, then having
* fixedSlots is a waste.
*/
DebugOnly<uint32> cap = obj->getDenseArrayCapacity();
if (!obj->ensureElements(cx, length))
return false;
JS_ASSERT_IF(cap, !obj->hasDynamicElements());
return true;
}
template<bool allocateCapacity>
static JS_ALWAYS_INLINE JSObject *
NewArray(JSContext *cx, jsuint length, JSObject *proto)
{
JS_ASSERT_IF(proto, proto->isArray());
gc::AllocKind kind = GuessArrayGCKind(length);
gc::AllocKind kind = GuessObjectGCKind(length, true);
JSObject *obj = detail::NewObject<WithProto::Class, false>(cx, &ArrayClass, proto, NULL, kind);
#ifdef JS_THREADSAFE
JS_ASSERT(CanBeFinalizedInBackground(kind, &ArrayClass));
kind = GetBackgroundAllocKind(kind);
#endif
GlobalObject *parent = GetCurrentGlobal(cx);
NewObjectCache &cache = cx->compartment->newObjectCache;
NewObjectCache::EntryIndex entry = -1;
if (cache.lookupGlobal(&ArrayClass, parent, kind, &entry)) {
JSObject *obj = cache.newObjectFromHit(cx, entry);
if (!obj)
return NULL;
/* Fixup the elements pointer and length, which may be incorrect. */
obj->setFixedElements();
obj->setArrayLength(cx, length);
if (allocateCapacity && !EnsureNewArrayElements(cx, obj, length))
return NULL;
return obj;
}
if (!proto && !FindProto(cx, &ArrayClass, parent, &proto))
return NULL;
types::TypeObject *type = proto->getNewType(cx);
if (!type)
return NULL;
/*
* Get a shape with zero fixed slots, regardless of the size class.
* See JSObject::createDenseArray.
*/
Shape *shape = EmptyShape::getInitialShape(cx, &ArrayClass, proto,
proto->getParent(), gc::FINALIZE_OBJECT0);
if (!shape)
return NULL;
JSObject* obj = JSObject::createDenseArray(cx, kind, shape, type, length);
if (!obj)
return NULL;
obj->setArrayLength(cx, length);
if (entry != -1)
cache.fillGlobal(entry, &ArrayClass, parent, kind, obj);
if (!cx->typeInferenceEnabled()) {
obj->markDenseArrayNotPacked(cx);
obj->backfillDenseArrayHoles(cx);
}
if (allocateCapacity) {
/* If ensureSlots creates dynamically allocated slots, then having fixedSlots is a waste. */
DebugOnly<uint32> oldSlots = obj->numSlots();
if (!obj->ensureSlots(cx, length))
return NULL;
JS_ASSERT_IF(obj->numFixedSlots(), oldSlots == obj->numSlots());
}
if (allocateCapacity && !EnsureNewArrayElements(cx, obj, length))
return NULL;
Probes::createObject(cx, obj);
return obj;
}
@ -3710,8 +3751,7 @@ NewDenseCopiedArray(JSContext *cx, uint32 length, const Value *vp, JSObject *pro
JS_ASSERT(obj->getDenseArrayCapacity() >= length);
if (cx->typeInferenceEnabled())
obj->setDenseArrayInitializedLength(vp ? length : 0);
obj->setDenseArrayInitializedLength(vp ? length : 0);
if (vp)
obj->initDenseArrayElements(0, vp, length);
@ -3722,7 +3762,7 @@ NewDenseCopiedArray(JSContext *cx, uint32 length, const Value *vp, JSObject *pro
JSObject *
NewSlowEmptyArray(JSContext *cx)
{
JSObject *obj = NewNonFunction<WithProto::Class>(cx, &SlowArrayClass, NULL, NULL);
JSObject *obj = NewBuiltinClassInstance(cx, &SlowArrayClass);
if (!obj || !AddLengthProperty(cx, obj))
return NULL;

Просмотреть файл

@ -51,20 +51,6 @@
/* Small arrays are dense, no matter what. */
const uintN MIN_SPARSE_INDEX = 256;
inline uint32
JSObject::getDenseArrayInitializedLength()
{
JS_ASSERT(isDenseArray());
return initializedLength();
}
inline bool
JSObject::isPackedDenseArray()
{
JS_ASSERT(isDenseArray());
return flags & PACKED_ARRAY;
}
namespace js {
/* 2^32-2, inclusive */
const uint32 MAX_ARRAY_INDEX = 4294967294u;

Просмотреть файл

@ -43,31 +43,11 @@
#include "jsinferinlines.h"
#include "jsobjinlines.h"
inline void
JSObject::setDenseArrayInitializedLength(uint32 length)
{
JS_ASSERT(isDenseArray());
JS_ASSERT(length <= getDenseArrayCapacity());
uint32 cur = initializedLength();
prepareSlotRangeForOverwrite(length, cur);
initializedLength() = length;
}
inline void
JSObject::markDenseArrayNotPacked(JSContext *cx)
{
JS_ASSERT(isDenseArray());
if (flags & PACKED_ARRAY) {
flags ^= PACKED_ARRAY;
MarkTypeObjectFlags(cx, this, js::types::OBJECT_FLAG_NON_PACKED_ARRAY);
}
}
inline void
JSObject::backfillDenseArrayHoles(JSContext *cx)
{
/* Ensure an array's elements are fully initialized. */
ensureDenseArrayInitializedLength(cx, getDenseArrayCapacity(), 0);
MarkTypeObjectFlags(cx, this, js::types::OBJECT_FLAG_NON_PACKED_ARRAY);
}
inline void
@ -78,13 +58,14 @@ JSObject::ensureDenseArrayInitializedLength(JSContext *cx, uint32 index, uint32
* mark the elements through 'index + extra' as initialized in preparation
* for a write.
*/
JS_ASSERT(index + extra <= capacity);
if (initializedLength() < index)
JS_ASSERT(index + extra <= getDenseArrayCapacity());
uint32 &initlen = getElementsHeader()->initializedLength;
if (initlen < index)
markDenseArrayNotPacked(cx);
if (initializedLength() < index + extra) {
js::InitValueRange(slots + initializedLength(), index + extra - initializedLength(), true);
initializedLength() = index + extra;
if (initlen < index + extra) {
js::InitValueRange(elements + initlen, index + extra - initlen, true);
initlen = index + extra;
}
}
@ -93,13 +74,7 @@ JSObject::ensureDenseArrayElements(JSContext *cx, uintN index, uintN extra)
{
JS_ASSERT(isDenseArray());
uintN currentCapacity = numSlots();
/*
* Don't take excessive slow paths when inference is disabled, due to
* uninitialized slots between initializedLength and capacity.
*/
JS_ASSERT_IF(!cx->typeInferenceEnabled(), currentCapacity == getDenseArrayInitializedLength());
uintN currentCapacity = getDenseArrayCapacity();
uintN requiredCapacity;
if (extra == 1) {
@ -133,7 +108,7 @@ JSObject::ensureDenseArrayElements(JSContext *cx, uintN index, uintN extra)
willBeSparseDenseArray(requiredCapacity, extra)) {
return ED_SPARSE;
}
if (!growSlots(cx, requiredCapacity))
if (!growElements(cx, requiredCapacity))
return ED_FAILED;
ensureDenseArrayInitializedLength(cx, index, extra);

Просмотреть файл

@ -65,10 +65,9 @@ enum AllocKind {
FINALIZE_OBJECT16,
FINALIZE_OBJECT16_BACKGROUND,
FINALIZE_OBJECT_LAST = FINALIZE_OBJECT16_BACKGROUND,
FINALIZE_FUNCTION,
FINALIZE_FUNCTION_AND_OBJECT_LAST = FINALIZE_FUNCTION,
FINALIZE_SCRIPT,
FINALIZE_SHAPE,
FINALIZE_BASE_SHAPE,
FINALIZE_TYPE_OBJECT,
#if JS_HAS_XML_SUPPORT
FINALIZE_XML,
@ -81,7 +80,6 @@ enum AllocKind {
static const unsigned FINALIZE_LIMIT = FINALIZE_LAST + 1;
static const unsigned FINALIZE_OBJECT_LIMIT = FINALIZE_OBJECT_LAST + 1;
static const unsigned FINALIZE_FUNCTION_AND_OBJECT_LIMIT = FINALIZE_FUNCTION_AND_OBJECT_LAST + 1;
/*
* Live objects are marked black. How many other additional colors are available

Просмотреть файл

@ -360,6 +360,10 @@ struct Class
bool isNative() const {
return !(flags & NON_NATIVE);
}
bool hasPrivate() const {
return !!(flags & JSCLASS_HAS_PRIVATE);
}
};
JS_STATIC_ASSERT(offsetof(JSClass, name) == offsetof(Class, name));

Просмотреть файл

@ -1189,7 +1189,7 @@ js_ReportMissingArg(JSContext *cx, const Value &v, uintN arg)
JS_snprintf(argbuf, sizeof argbuf, "%u", arg);
bytes = NULL;
if (IsFunctionObject(v)) {
atom = v.toObject().getFunctionPrivate()->atom;
atom = v.toObject().toFunction()->atom;
bytes = DecompileValueGenerator(cx, JSDVG_SEARCH_STACK,
v, atom);
if (!bytes)

Просмотреть файл

@ -394,20 +394,6 @@ struct JSRuntime
JSActivityCallback activityCallback;
void *activityCallbackArg;
/*
* Shape regenerated whenever a prototype implicated by an "add property"
* property cache fill and induced trace guard has a readonly property or a
* setter defined on it. This number proxies for the shapes of all objects
* along the prototype chain of all objects in the runtime on which such an
* add-property result has been cached/traced.
*
* See bug 492355 for more details.
*
* This comes early in JSRuntime to minimize the immediate format used by
* trace-JITted code that reads it.
*/
uint32 protoHazardShape;
/* Garbage collector state, used by jsgc.c. */
/*
@ -485,7 +471,6 @@ struct JSRuntime
bool gcPoke;
bool gcMarkAndSweep;
bool gcRunning;
bool gcRegenShapes;
/*
* These options control the zealousness of the GC. The fundamental values
@ -664,21 +649,6 @@ struct JSRuntime
void setTrustedPrincipals(JSPrincipals *p) { trustedPrincipals_ = p; }
JSPrincipals *trustedPrincipals() const { return trustedPrincipals_; }
/*
* Object shape (property cache structural type) identifier generator.
*
* Type 0 stands for the empty scope, and must not be regenerated due to
* uint32 wrap-around. Since js_GenerateShape (in jsinterp.cpp) uses
* atomic pre-increment, the initial value for the first typed non-empty
* scope will be 1.
*
* If this counter overflows into SHAPE_OVERFLOW_BIT (in jsinterp.h), the
* cache is disabled, to avoid aliasing two different types. It stays
* disabled until a triggered GC at some later moment compresses live
* types, minimizing rt->shapeGen in the process.
*/
volatile uint32 shapeGen;
/* Literal table maintained by jsatom.c functions. */
JSAtomState atomState;
@ -2135,29 +2105,6 @@ enum FrameExpandKind {
FRAME_EXPAND_ALL = 1
};
static JS_INLINE JSBool
js_IsPropertyCacheDisabled(JSContext *cx)
{
return cx->runtime->shapeGen >= js::SHAPE_OVERFLOW_BIT;
}
static JS_INLINE uint32
js_RegenerateShapeForGC(JSRuntime *rt)
{
JS_ASSERT(rt->gcRunning);
JS_ASSERT(rt->gcRegenShapes);
/*
* Under the GC, compared with js_GenerateShape, we don't need to use
* atomic increments but we still must make sure that after an overflow
* the shape stays such.
*/
uint32 shape = rt->shapeGen;
shape = (shape + 1) | (shape & js::SHAPE_OVERFLOW_BIT);
rt->shapeGen = shape;
return shape;
}
namespace js {
/************************************************************************/

Просмотреть файл

@ -334,7 +334,7 @@ CallJSNativeConstructor(JSContext *cx, Native native, const CallArgs &args)
JS_ASSERT_IF(native != FunctionProxyClass.construct &&
native != CallableObjectClass.construct &&
native != js::CallOrConstructBoundFunction &&
(!callee.isFunction() || callee.getFunctionPrivate()->u.n.clasp != &ObjectClass),
(!callee.isFunction() || callee.toFunction()->u.n.clasp != &ObjectClass),
!args.rval().isPrimitive() && callee != args.rval().toObject());
return true;
@ -472,12 +472,6 @@ JSContext::ensureGeneratorStackSpace()
return ok;
}
inline js::RegExpStatics *
JSContext::regExpStatics()
{
return js::GetGlobalForScopeChain(this)->getRegExpStatics();
}
inline void
JSContext::setPendingException(js::Value v) {
this->throwing = true;

Просмотреть файл

@ -84,14 +84,7 @@ JSCompartment::JSCompartment(JSRuntime *rt)
jaegerCompartment_(NULL),
#endif
propertyTree(thisForCtor()),
emptyArgumentsShape(NULL),
emptyBlockShape(NULL),
emptyCallShape(NULL),
emptyDeclEnvShape(NULL),
emptyEnumeratorShape(NULL),
emptyWithShape(NULL),
initialRegExpShape(NULL),
initialStringShape(NULL),
emptyTypeObject(NULL),
debugModeBits(rt->debugMode ? DebugFromC : 0),
mathCache(NULL),
breakpointSites(rt),
@ -121,6 +114,8 @@ JSCompartment::init(JSContext *cx)
activeAnalysis = activeInference = false;
types.init(cx);
newObjectCache.reset();
if (!crossCompartmentWrappers.init())
return false;
@ -260,7 +255,8 @@ JSCompartment::wrap(JSContext *cx, Value *vp)
JS_ASSERT(obj->isCrossCompartmentWrapper());
if (global->getClass() != &dummy_class && obj->getParent() != global) {
do {
obj->setParent(global);
if (!obj->setParent(cx, global))
return false;
obj = obj->getProto();
} while (obj && obj->isCrossCompartmentWrapper());
}
@ -314,7 +310,8 @@ JSCompartment::wrap(JSContext *cx, Value *vp)
if (!crossCompartmentWrappers.put(GetProxyPrivate(wrapper), *vp))
return false;
wrapper->setParent(global);
if (!wrapper->setParent(cx, global))
return false;
return true;
}
@ -432,11 +429,11 @@ JSCompartment::markTypes(JSTracer *trc)
}
for (size_t thingKind = FINALIZE_OBJECT0;
thingKind < FINALIZE_FUNCTION_AND_OBJECT_LIMIT;
thingKind < FINALIZE_OBJECT_LIMIT;
thingKind++) {
for (CellIterUnderGC i(this, AllocKind(thingKind)); !i.done(); i.next()) {
JSObject *object = i.get<JSObject>();
if (!object->isNewborn() && object->hasSingletonType())
if (object->hasSingletonType())
MarkRoot(trc, object, "mark_types_singleton");
}
}
@ -459,24 +456,17 @@ JSCompartment::sweep(JSContext *cx, bool releaseTypes)
}
}
/* Remove dead empty shapes. */
if (emptyArgumentsShape && IsAboutToBeFinalized(cx, emptyArgumentsShape))
emptyArgumentsShape = NULL;
if (emptyBlockShape && IsAboutToBeFinalized(cx, emptyBlockShape))
emptyBlockShape = NULL;
if (emptyCallShape && IsAboutToBeFinalized(cx, emptyCallShape))
emptyCallShape = NULL;
if (emptyDeclEnvShape && IsAboutToBeFinalized(cx, emptyDeclEnvShape))
emptyDeclEnvShape = NULL;
if (emptyEnumeratorShape && IsAboutToBeFinalized(cx, emptyEnumeratorShape))
emptyEnumeratorShape = NULL;
if (emptyWithShape && IsAboutToBeFinalized(cx, emptyWithShape))
emptyWithShape = NULL;
/* Remove dead references held weakly by the compartment. */
if (initialRegExpShape && IsAboutToBeFinalized(cx, initialRegExpShape))
initialRegExpShape = NULL;
if (initialStringShape && IsAboutToBeFinalized(cx, initialStringShape))
initialStringShape = NULL;
sweepBaseShapeTable(cx);
sweepInitialShapeTable(cx);
sweepNewTypeObjectTable(cx, newTypeObjects);
sweepNewTypeObjectTable(cx, lazyTypeObjects);
if (emptyTypeObject && IsAboutToBeFinalized(cx, emptyTypeObject))
emptyTypeObject = NULL;
newObjectCache.reset();
sweepBreakpoints(cx);

Просмотреть файл

@ -46,6 +46,7 @@
#include "jsgc.h"
#include "jsgcstats.h"
#include "jsobj.h"
#include "jsscope.h"
#include "vm/GlobalObject.h"
#ifdef _MSC_VER
@ -248,37 +249,28 @@ struct JS_FRIEND_API(JSCompartment) {
jsrefcount liveDictModeNodes;
#endif
typedef js::ReadBarriered<js::EmptyShape> BarrieredEmptyShape;
typedef js::ReadBarriered<const js::Shape> BarrieredShape;
/* Set of all unowned base shapes in the compartment. */
js::BaseShapeSet baseShapes;
void sweepBaseShapeTable(JSContext *cx);
/*
* Runtime-shared empty scopes for well-known built-in objects that lack
* class prototypes (the usual locus of an emptyShape). Mnemonic: ABCDEW
*/
BarrieredEmptyShape emptyArgumentsShape;
BarrieredEmptyShape emptyBlockShape;
BarrieredEmptyShape emptyCallShape;
BarrieredEmptyShape emptyDeclEnvShape;
BarrieredEmptyShape emptyEnumeratorShape;
BarrieredEmptyShape emptyWithShape;
/* Set of initial shapes in the compartment. */
js::InitialShapeSet initialShapes;
void sweepInitialShapeTable(JSContext *cx);
typedef js::HashSet<js::EmptyShape *,
js::DefaultHasher<js::EmptyShape *>,
js::SystemAllocPolicy> EmptyShapeSet;
/* Set of default 'new' or lazy types in the compartment. */
js::types::TypeObjectSet newTypeObjects;
js::types::TypeObjectSet lazyTypeObjects;
void sweepNewTypeObjectTable(JSContext *cx, js::types::TypeObjectSet &table);
EmptyShapeSet emptyShapes;
js::types::TypeObject *emptyTypeObject;
/*
* Initial shapes given to RegExp and String objects, encoding the initial
* sets of built-in instance properties and the fixed slots where they must
* be stored (see JSObject::JSSLOT_(REGEXP|STRING)_*). Later property
* additions may cause these shapes to not be used by a RegExp or String
* (even along the entire shape parent chain, should the object go into
* dictionary mode). But because all the initial properties are
* non-configurable, they will always map to fixed slots.
*/
BarrieredShape initialRegExpShape;
BarrieredShape initialStringShape;
/* Get the default 'new' type for objects with a NULL prototype. */
inline js::types::TypeObject *getEmptyType(JSContext *cx);
js::types::TypeObject *getLazyType(JSContext *cx, JSObject *proto);
/* Cache to speed up object creation. */
js::NewObjectCache newObjectCache;
private:
enum { DebugFromC = 1, DebugFromJS = 2 };

Просмотреть файл

@ -1228,9 +1228,11 @@ SetUTCTime(JSContext *cx, JSObject *obj, jsdouble t, Value *vp = NULL)
{
JS_ASSERT(obj->isDate());
size_t slotCap = JS_MIN(obj->numSlots(), JSObject::DATE_CLASS_RESERVED_SLOTS);
for (size_t ind = JSObject::JSSLOT_DATE_COMPONENTS_START; ind < slotCap; ind++)
for (size_t ind = JSObject::JSSLOT_DATE_COMPONENTS_START;
ind < JSObject::DATE_CLASS_RESERVED_SLOTS;
ind++) {
obj->setSlot(ind, UndefinedValue());
}
obj->setDateUTCTime(DoubleValue(t));
if (vp)
@ -1257,12 +1259,6 @@ FillLocalTimes(JSContext *cx, JSObject *obj)
jsdouble utcTime = obj->getDateUTCTime().toNumber();
/* Make sure there are slots to store the cached information. */
if (obj->numSlots() < JSObject::DATE_CLASS_RESERVED_SLOTS) {
if (!obj->growSlots(cx, JSObject::DATE_CLASS_RESERVED_SLOTS))
return false;
}
if (!JSDOUBLE_IS_FINITE(utcTime)) {
for (size_t ind = JSObject::JSSLOT_DATE_COMPONENTS_START;
ind < JSObject::DATE_CLASS_RESERVED_SLOTS;
@ -2696,8 +2692,6 @@ js_InitDateClass(JSContext *cx, JSObject *obj)
{
return NULL;
}
if (!cx->typeInferenceEnabled())
dateProto->brand(cx);
if (!DefineConstructorAndPrototype(cx, global, JSProto_Date, ctor, dateProto))
return NULL;
@ -2709,7 +2703,7 @@ JS_FRIEND_API(JSObject *)
js_NewDateObjectMsec(JSContext *cx, jsdouble msec_time)
{
JSObject *obj = NewBuiltinClassInstance(cx, &DateClass);
if (!obj || !obj->ensureSlots(cx, JSObject::DATE_CLASS_RESERVED_SLOTS))
if (!obj)
return NULL;
if (!SetUTCTime(cx, obj, msec_time))
return NULL;

Просмотреть файл

@ -616,10 +616,15 @@ JS_GetFrameFunctionObject(JSContext *cx, JSStackFrame *fpArg)
return NULL;
JS_ASSERT(fp->callee().isFunction());
JS_ASSERT(fp->callee().getPrivate() == fp->fun());
return &fp->callee();
}
JS_PUBLIC_API(JSObject *)
JS_GetParentOrScopeChain(JSContext *cx, JSObject *obj)
{
return obj->scopeChain();
}
JS_PUBLIC_API(JSBool)
JS_IsConstructorFrame(JSContext *cx, JSStackFrame *fp)
{
@ -786,7 +791,7 @@ JS_PropertyIterator(JSObject *obj, JSScopeProperty **iteratorp)
shape = shape->previous();
if (!shape->previous()) {
JS_ASSERT(JSID_IS_EMPTY(shape->propid));
JS_ASSERT(shape->isEmptyShape());
shape = NULL;
}
@ -799,7 +804,7 @@ JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop,
{
assertSameCompartment(cx, obj);
Shape *shape = (Shape *) sprop;
pd->id = IdToJsval(shape->propid);
pd->id = IdToJsval(shape->propid());
JSBool wasThrowing = cx->isExceptionPending();
Value lastException = UndefinedValue();
@ -807,7 +812,7 @@ JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop,
lastException = cx->getPendingException();
cx->clearPendingException();
if (!js_GetProperty(cx, obj, shape->propid, &pd->value)) {
if (!js_GetProperty(cx, obj, shape->propid(), &pd->value)) {
if (!cx->isExceptionPending()) {
pd->flags = JSPD_ERROR;
pd->value = JSVAL_VOID;
@ -827,21 +832,21 @@ JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop,
| (!shape->configurable() ? JSPD_PERMANENT : 0);
pd->spare = 0;
if (shape->getter() == GetCallArg) {
pd->slot = shape->shortid;
pd->slot = shape->shortid();
pd->flags |= JSPD_ARGUMENT;
} else if (shape->getter() == GetCallVar) {
pd->slot = shape->shortid;
pd->slot = shape->shortid();
pd->flags |= JSPD_VARIABLE;
} else {
pd->slot = 0;
}
pd->alias = JSVAL_VOID;
if (obj->containsSlot(shape->slot)) {
if (obj->containsSlot(shape->slot())) {
for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) {
const Shape &aprop = r.front();
if (&aprop != shape && aprop.slot == shape->slot) {
pd->alias = IdToJsval(aprop.propid);
if (&aprop != shape && aprop.slot() == shape->slot()) {
pd->alias = IdToJsval(aprop.propid());
break;
}
}
@ -1063,8 +1068,7 @@ JS_IsSystemObject(JSContext *cx, JSObject *obj)
JS_PUBLIC_API(JSBool)
JS_MakeSystemObject(JSContext *cx, JSObject *obj)
{
obj->setSystem();
return true;
return obj->setSystem(cx);
}
/************************************************************************/

Просмотреть файл

@ -279,6 +279,9 @@ JS_GetFrameFunction(JSContext *cx, JSStackFrame *fp);
extern JS_PUBLIC_API(JSObject *)
JS_GetFrameFunctionObject(JSContext *cx, JSStackFrame *fp);
extern JS_PUBLIC_API(JSObject *)
JS_GetParentOrScopeChain(JSContext *cx, JSObject *obj);
/* XXXrginda Initially published with typo */
#define JS_IsContructorFrame JS_IsConstructorFrame
extern JS_PUBLIC_API(JSBool)
@ -458,9 +461,7 @@ JS_GetScriptTotalSize(JSContext *cx, JSScript *script);
* Return true if obj is a "system" object, that is, one created by
* JS_NewSystemObject with the system flag set and not JS_NewObject.
*
* What "system" means is up to the API client, but it can be used to implement
* access control policies based on script filenames and their prefixes, using
* JS_FlagScriptFilenamePrefix and JS_GetTopScriptFilenameFlags.
* What "system" means is up to the API client.
*/
extern JS_PUBLIC_API(JSBool)
JS_IsSystemObject(JSContext *cx, JSObject *obj);

Просмотреть файл

@ -550,7 +550,7 @@ ValueToShortSource(JSContext *cx, const Value &v)
/*
* XXX Avoid function decompilation bloat for now.
*/
str = JS_GetFunctionId(obj->getFunctionPrivate());
str = JS_GetFunctionId(obj->toFunction());
if (!str && !(str = js_ValueToSource(cx, v))) {
/*
* Continue to soldier on if the function couldn't be
@ -700,10 +700,6 @@ FilenameToString(JSContext *cx, const char *filename)
return JS_NewStringCopyZ(cx, filename);
}
enum {
JSSLOT_ERROR_EXNTYPE = 0
};
static JSBool
Exception(JSContext *cx, uintN argc, Value *vp)
{
@ -726,7 +722,7 @@ Exception(JSContext *cx, uintN argc, Value *vp)
}
JSObject *errProto = &protov.toObject();
JSObject *obj = NewNativeClassInstance(cx, &ErrorClass, errProto, errProto->getParent());
JSObject *obj = NewObjectWithGivenProto(cx, &ErrorClass, errProto, NULL);
if (!obj)
return false;
@ -772,7 +768,7 @@ Exception(JSContext *cx, uintN argc, Value *vp)
lineno = iter.done() ? 0 : js_FramePCToLineNumber(cx, iter.fp(), iter.pc());
}
intN exnType = args.callee().getReservedSlot(JSSLOT_ERROR_EXNTYPE).toInt32();
intN exnType = args.callee().toFunction()->getExtendedSlot(0).toInt32();
if (!InitExnPrivate(cx, obj, message, filename, lineno, NULL, exnType))
return false;
@ -1035,10 +1031,11 @@ InitErrorClass(JSContext *cx, GlobalObject *global, intN type, JSObject &proto)
}
/* Create the corresponding constructor. */
JSFunction *ctor = global->createConstructor(cx, Exception, &ErrorClass, name, 1);
JSFunction *ctor = global->createConstructor(cx, Exception, &ErrorClass, name, 1,
JSFunction::ExtendedFinalizeKind);
if (!ctor)
return NULL;
ctor->setReservedSlot(JSSLOT_ERROR_EXNTYPE, Int32Value(int32(type)));
ctor->setExtendedSlot(0, Int32Value(int32(type)));
if (!LinkConstructorAndPrototype(cx, ctor, errorProto))
return NULL;
@ -1174,7 +1171,7 @@ js_ErrorToException(JSContext *cx, const char *message, JSErrorReport *reportp,
goto out;
tv[0] = OBJECT_TO_JSVAL(errProto);
errObject = NewNativeClassInstance(cx, &ErrorClass, errProto, errProto->getParent());
errObject = NewObjectWithGivenProto(cx, &ErrorClass, errProto, NULL);
if (!errObject) {
ok = JS_FALSE;
goto out;
@ -1362,7 +1359,7 @@ js_CopyErrorObject(JSContext *cx, JSObject *errobj, JSObject *scope)
JSObject *proto;
if (!js_GetClassPrototype(cx, scope->getGlobal(), GetExceptionProtoKey(copy->exnType), &proto))
return NULL;
JSObject *copyobj = NewNativeClassInstance(cx, &ErrorClass, proto, proto->getParent());
JSObject *copyobj = NewObjectWithGivenProto(cx, &ErrorClass, proto, NULL);
copyobj->setPrivate(copy);
autoFree.p = NULL;
return copyobj;

Просмотреть файл

@ -85,7 +85,7 @@ JS_FRIEND_API(JSFunction *)
JS_GetObjectFunction(JSObject *obj)
{
if (obj->isFunction())
return obj->getFunctionPrivate();
return obj->toFunction();
return NULL;
}
@ -125,14 +125,6 @@ JS_NewObjectWithUniqueType(JSContext *cx, JSClass *clasp, JSObject *proto, JSObj
return obj;
}
JS_FRIEND_API(uint32)
JS_ObjectCountDynamicSlots(JSObject *obj)
{
if (obj->hasSlotsArray())
return obj->numDynamicSlots(obj->numSlots());
return 0;
}
JS_PUBLIC_API(void)
JS_ShrinkingGC(JSContext *cx)
{
@ -186,20 +178,126 @@ AutoSwitchCompartment::~AutoSwitchCompartment()
cx->compartment = oldCompartment;
}
#ifdef DEBUG
JS_FRIEND_API(void)
js::CheckReservedSlot(const JSObject *obj, size_t slot)
JS_FRIEND_API(size_t)
js::GetObjectDynamicSlotSize(JSObject *obj, JSMallocSizeOfFun mallocSizeOf)
{
CheckSlot(obj, slot);
JS_ASSERT(slot < JSSLOT_FREE(obj->getClass()));
return obj->dynamicSlotSize(mallocSizeOf);
}
JS_FRIEND_API(size_t)
js::GetCompartmentShapeTableSize(JSCompartment *c, JSMallocSizeOfFun mallocSizeOf)
{
return c->baseShapes.sizeOfExcludingThis(mallocSizeOf)
+ c->initialShapes.sizeOfExcludingThis(mallocSizeOf)
+ c->newTypeObjects.sizeOfExcludingThis(mallocSizeOf)
+ c->lazyTypeObjects.sizeOfExcludingThis(mallocSizeOf);
}
JS_FRIEND_API(bool)
js::IsScopeObject(const JSObject *obj)
{
return obj->isInternalScope();
}
JS_FRIEND_API(JSObject *)
js::GetObjectParentMaybeScope(const JSObject *obj)
{
return obj->scopeChain();
}
JS_FRIEND_API(JSObject *)
js::GetGlobalForObjectCrossCompartment(JSObject *obj)
{
return obj->getGlobal();
}
JS_FRIEND_API(uint32)
js::GetObjectSlotSpan(const JSObject *obj)
{
return obj->slotSpan();
}
JS_FRIEND_API(bool)
js::IsOriginalScriptFunction(JSFunction *fun)
{
return fun->script()->function() == fun;
}
JS_FRIEND_API(JSFunction *)
js::DefineFunctionWithReserved(JSContext *cx, JSObject *obj, const char *name, JSNative call,
uintN nargs, uintN attrs)
{
JS_THREADSAFE_ASSERT(cx->compartment != cx->runtime->atomsCompartment);
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj);
JSAtom *atom = js_Atomize(cx, name, strlen(name));
if (!atom)
return NULL;
return js_DefineFunction(cx, obj, ATOM_TO_JSID(atom), call, nargs, attrs,
JSFunction::ExtendedFinalizeKind);
}
JS_FRIEND_API(JSFunction *)
js::NewFunctionWithReserved(JSContext *cx, JSNative native, uintN nargs, uintN flags,
JSObject *parent, const char *name)
{
JS_THREADSAFE_ASSERT(cx->compartment != cx->runtime->atomsCompartment);
JSAtom *atom;
CHECK_REQUEST(cx);
assertSameCompartment(cx, parent);
if (!name) {
atom = NULL;
} else {
atom = js_Atomize(cx, name, strlen(name));
if (!atom)
return NULL;
}
return js_NewFunction(cx, NULL, native, nargs, flags, parent, atom,
JSFunction::ExtendedFinalizeKind);
}
JS_FRIEND_API(JSFunction *)
js::NewFunctionByIdWithReserved(JSContext *cx, JSNative native, uintN nargs, uintN flags, JSObject *parent,
jsid id)
{
JS_ASSERT(JSID_IS_STRING(id));
JS_THREADSAFE_ASSERT(cx->compartment != cx->runtime->atomsCompartment);
CHECK_REQUEST(cx);
assertSameCompartment(cx, parent);
return js_NewFunction(cx, NULL, native, nargs, flags, parent, JSID_TO_ATOM(id),
JSFunction::ExtendedFinalizeKind);
}
JS_FRIEND_API(JSObject *)
js::InitClassWithReserved(JSContext *cx, JSObject *obj, JSObject *parent_proto,
JSClass *clasp, JSNative constructor, uintN nargs,
JSPropertySpec *ps, JSFunctionSpec *fs,
JSPropertySpec *static_ps, JSFunctionSpec *static_fs)
{
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj, parent_proto);
return js_InitClass(cx, obj, parent_proto, Valueify(clasp), constructor,
nargs, ps, fs, static_ps, static_fs, NULL,
JSFunction::ExtendedFinalizeKind);
}
JS_FRIEND_API(const Value &)
js::GetFunctionNativeReserved(JSObject *fun, size_t which)
{
JS_ASSERT(fun->toFunction()->isNative());
return fun->toFunction()->getExtendedSlot(which);
}
JS_FRIEND_API(void)
js::CheckSlot(const JSObject *obj, size_t slot)
js::SetFunctionNativeReserved(JSObject *fun, size_t which, const Value &val)
{
JS_ASSERT(slot < obj->numSlots());
JS_ASSERT(fun->toFunction()->isNative());
fun->toFunction()->setExtendedSlot(which, val);
}
#endif
/*
* The below code is for temporary telemetry use. It can be removed when

Просмотреть файл

@ -113,7 +113,6 @@ typedef struct TypeInferenceMemoryStats
int64 objects;
int64 tables;
int64 temporary;
int64 emptyShapes;
} TypeInferenceMemoryStats;
extern JS_FRIEND_API(void)
@ -199,6 +198,12 @@ JS_FRIEND_API(JSBool) obj_defineGetter(JSContext *cx, uintN argc, js::Value *vp)
JS_FRIEND_API(JSBool) obj_defineSetter(JSContext *cx, uintN argc, js::Value *vp);
#endif
extern JS_FRIEND_API(size_t)
GetObjectDynamicSlotSize(JSObject *obj, JSMallocSizeOfFun mallocSizeOf);
extern JS_FRIEND_API(size_t)
GetCompartmentShapeTableSize(JSCompartment *c, JSMallocSizeOfFun mallocSizeOf);
/*
* Check whether it is OK to assign an undeclared property with name
* propname of the global object in the current script on cx. Reports
@ -244,24 +249,34 @@ struct TypeObject {
JSObject *proto;
};
struct Object {
void *_1;
struct BaseShape {
js::Class *clasp;
uint32 flags;
uint32 objShape;
void *_2;
JSObject *parent;
void *privateData;
jsuword capacity;
js::Value *slots;
TypeObject *type;
};
struct Shape {
BaseShape *base;
jsid _1;
uint32 slotInfo;
static const uint32 FIXED_SLOTS_SHIFT = 27;
};
struct Object {
Shape *shape;
TypeObject *type;
js::Value *slots;
js::Value *_1;
size_t numFixedSlots() const { return shape->slotInfo >> Shape::FIXED_SLOTS_SHIFT; }
Value *fixedSlots() const {
return (Value *)((jsuword) this + sizeof(shadow::Object));
}
js::Value &slotRef(size_t slot) const {
size_t nfixed = flags >> FIXED_SLOTS_SHIFT;
size_t nfixed = numFixedSlots();
if (slot < nfixed)
return ((Value *)((jsuword) this + sizeof(shadow::Object)))[slot];
return fixedSlots()[slot];
return slots[slot - nfixed];
}
};
@ -284,7 +299,7 @@ extern JS_FRIEND_DATA(js::Class) XMLClass;
inline js::Class *
GetObjectClass(const JSObject *obj)
{
return reinterpret_cast<const shadow::Object*>(obj)->clasp;
return reinterpret_cast<const shadow::Object*>(obj)->shape->base->clasp;
}
inline JSClass *
@ -293,12 +308,49 @@ GetObjectJSClass(const JSObject *obj)
return js::Jsvalify(GetObjectClass(obj));
}
JS_FRIEND_API(bool)
IsScopeObject(const JSObject *obj);
inline JSObject *
GetObjectParent(const JSObject *obj)
{
return reinterpret_cast<const shadow::Object*>(obj)->parent;
JS_ASSERT(!IsScopeObject(obj));
return reinterpret_cast<const shadow::Object*>(obj)->shape->base->parent;
}
JS_FRIEND_API(JSObject *)
GetObjectParentMaybeScope(const JSObject *obj);
JS_FRIEND_API(JSObject *)
GetGlobalForObjectCrossCompartment(JSObject *obj);
JS_FRIEND_API(bool)
IsOriginalScriptFunction(JSFunction *fun);
JS_FRIEND_API(JSFunction *)
DefineFunctionWithReserved(JSContext *cx, JSObject *obj, const char *name, JSNative call,
uintN nargs, uintN attrs);
JS_FRIEND_API(JSFunction *)
NewFunctionWithReserved(JSContext *cx, JSNative call, uintN nargs, uintN flags,
JSObject *parent, const char *name);
JS_FRIEND_API(JSFunction *)
NewFunctionByIdWithReserved(JSContext *cx, JSNative native, uintN nargs, uintN flags,
JSObject *parent, jsid id);
JS_FRIEND_API(JSObject *)
InitClassWithReserved(JSContext *cx, JSObject *obj, JSObject *parent_proto,
JSClass *clasp, JSNative constructor, uintN nargs,
JSPropertySpec *ps, JSFunctionSpec *fs,
JSPropertySpec *static_ps, JSFunctionSpec *static_fs);
JS_FRIEND_API(const Value &)
GetFunctionNativeReserved(JSObject *fun, size_t which);
JS_FRIEND_API(void)
SetFunctionNativeReserved(JSObject *fun, size_t which, const Value &val);
inline JSObject *
GetObjectProto(const JSObject *obj)
{
@ -308,25 +360,11 @@ GetObjectProto(const JSObject *obj)
inline void *
GetObjectPrivate(const JSObject *obj)
{
return reinterpret_cast<const shadow::Object*>(obj)->privateData;
const shadow::Object *nobj = reinterpret_cast<const shadow::Object*>(obj);
void **addr = reinterpret_cast<void**>(&nobj->fixedSlots()[nobj->numFixedSlots()]);
return *addr;
}
inline JSObject *
GetObjectGlobal(JSObject *obj)
{
while (JSObject *parent = GetObjectParent(obj))
obj = parent;
return obj;
}
#ifdef DEBUG
extern JS_FRIEND_API(void) CheckReservedSlot(const JSObject *obj, size_t slot);
extern JS_FRIEND_API(void) CheckSlot(const JSObject *obj, size_t slot);
#else
inline void CheckReservedSlot(const JSObject *obj, size_t slot) {}
inline void CheckSlot(const JSObject *obj, size_t slot) {}
#endif
/*
* Get a slot that is both reserved for object's clasp *and* is fixed (fits
* within the maximum capacity for the object's fixed slots).
@ -334,34 +372,32 @@ inline void CheckSlot(const JSObject *obj, size_t slot) {}
inline const Value &
GetReservedSlot(const JSObject *obj, size_t slot)
{
CheckReservedSlot(obj, slot);
JS_ASSERT(slot < JSCLASS_RESERVED_SLOTS(GetObjectClass(obj)));
return reinterpret_cast<const shadow::Object *>(obj)->slotRef(slot);
}
inline void
SetReservedSlot(JSObject *obj, size_t slot, const Value &value)
{
CheckReservedSlot(obj, slot);
JS_ASSERT(slot < JSCLASS_RESERVED_SLOTS(GetObjectClass(obj)));
reinterpret_cast<shadow::Object *>(obj)->slotRef(slot) = value;
}
inline uint32
GetNumSlots(const JSObject *obj)
{
return uint32(reinterpret_cast<const shadow::Object *>(obj)->capacity);
}
JS_FRIEND_API(uint32)
GetObjectSlotSpan(const JSObject *obj);
inline const Value &
GetSlot(const JSObject *obj, size_t slot)
GetObjectSlot(const JSObject *obj, size_t slot)
{
CheckSlot(obj, slot);
JS_ASSERT(slot < GetObjectSlotSpan(obj));
return reinterpret_cast<const shadow::Object *>(obj)->slotRef(slot);
}
inline uint32
inline Shape *
GetObjectShape(const JSObject *obj)
{
return reinterpret_cast<const shadow::Object*>(obj)->objShape;
shadow::Shape *shape = reinterpret_cast<const shadow::Object*>(obj)->shape;
return reinterpret_cast<Shape *>(shape);
}
static inline js::PropertyOp

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -94,16 +94,17 @@
global object */
#define JSFUN_EXPR_CLOSURE 0x1000 /* expression closure: function(x) x*x */
#define JSFUN_EXTENDED 0x2000 /* structure is FunctionExtended */
#define JSFUN_INTERPRETED 0x4000 /* use u.i if kind >= this value else u.n */
#define JSFUN_FLAT_CLOSURE 0x8000 /* flat (aka "display") closure */
#define JSFUN_NULL_CLOSURE 0xc000 /* null closure entrains no scope chain */
#define JSFUN_KINDMASK 0xc000 /* encode interp vs. native and closure
optimization level -- see above */
struct JSFunction : public JSObject_Slots2
{
/* Functions always have two fixed slots (FUN_CLASS_RESERVED_SLOTS). */
namespace js { class FunctionExtended; }
struct JSFunction : public JSObject
{
uint16 nargs; /* maximum number of specified arguments,
reflected as f.length/f.arity */
uint16 flags; /* flags, see JSFUN_* below and in jsapi.h */
@ -116,10 +117,7 @@ struct JSFunction : public JSObject_Slots2
struct Scripted {
JSScript *script_; /* interpreted bytecode descriptor or null;
use the setter! */
uint16 skipmin; /* net skip amount up (toward zero) from
script_->staticLevel to nearest upvar,
including upvars in nested functions */
js::Shape *names; /* argument and variable names */
JSObject *env; /* environment for new activations */
} i;
void *nativeOrScript;
} u;
@ -163,37 +161,17 @@ struct JSFunction : public JSObject_Slots2
return flags & JSFUN_JOINABLE;
}
JSObject &compiledFunObj() {
return *this;
}
private:
/*
* FunctionClass reserves two slots, which are free in JSObject::fslots
* without requiring dslots allocation. Null closures that can be joined to
* a compiler-created function object use the first one to hold a mutable
* methodAtom() state variable, needed for correct foo.caller handling.
* For an interpreted function, accessors for the initial scope object of
* activations (stack frames) of the function.
*/
enum {
METHOD_ATOM_SLOT = JSSLOT_FUN_METHOD_ATOM
};
inline JSObject *environment() const;
inline void setEnvironment(JSObject *obj);
static inline size_t offsetOfEnvironment() { return offsetof(JSFunction, u.i.env); }
public:
inline void setJoinable();
/*
* Method name imputed from property uniquely assigned to or initialized,
* where the function does not need to be cloned to carry a scope chain or
* flattened upvars.
*/
JSAtom *methodAtom() const {
return (joinable() && getSlot(METHOD_ATOM_SLOT).isString())
? &getSlot(METHOD_ATOM_SLOT).toString()->asAtom()
: NULL;
}
inline void setMethodAtom(JSAtom *atom);
js::HeapPtrScript &script() const {
JS_ASSERT(isInterpreted());
return *(js::HeapPtrScript *)&u.i.script_;
@ -221,10 +199,6 @@ struct JSFunction : public JSObject_Slots2
return offsetof(JSFunction, u.nativeOrScript);
}
/* Number of extra fixed function object slots. */
static const uint32 CLASS_RESERVED_SLOTS = JSObject::FUN_CLASS_RESERVED_SLOTS;
js::Class *getConstructorClass() const {
JS_ASSERT(isNative());
return u.n.clasp;
@ -234,228 +208,127 @@ struct JSFunction : public JSObject_Slots2
JS_ASSERT(isNative());
u.n.clasp = clasp;
}
#if JS_BITS_PER_WORD == 32
static const js::gc::AllocKind FinalizeKind = js::gc::FINALIZE_OBJECT2;
static const js::gc::AllocKind ExtendedFinalizeKind = js::gc::FINALIZE_OBJECT4;
#else
static const js::gc::AllocKind FinalizeKind = js::gc::FINALIZE_OBJECT4;
static const js::gc::AllocKind ExtendedFinalizeKind = js::gc::FINALIZE_OBJECT8;
#endif
inline void trace(JSTracer *trc);
/* Bound function accessors. */
inline bool initBoundFunction(JSContext *cx, const js::Value &thisArg,
const js::Value *args, uintN argslen);
inline JSObject *getBoundFunctionTarget() const;
inline const js::Value &getBoundFunctionThis() const;
inline const js::Value &getBoundFunctionArgument(uintN which) const;
inline size_t getBoundFunctionArgumentCount() const;
private:
inline js::FunctionExtended *toExtended();
inline const js::FunctionExtended *toExtended() const;
inline bool isExtended() const {
JS_STATIC_ASSERT(FinalizeKind != ExtendedFinalizeKind);
JS_ASSERT(!!(flags & JSFUN_EXTENDED) == (getAllocKind() == ExtendedFinalizeKind));
return !!(flags & JSFUN_EXTENDED);
}
public:
/* Accessors for data stored in extended functions. */
inline void initializeExtended();
inline void setExtendedSlot(size_t which, const js::Value &val);
inline const js::Value &getExtendedSlot(size_t which) const;
/*
* Flat closures with one or more upvars snapshot the upvars' values
* into a vector of js::Values referenced from here. This is a private
* pointer but is set only at creation and does not need to be barriered.
*/
static const uint32 FLAT_CLOSURE_UPVARS_SLOT = 0;
static inline size_t getFlatClosureUpvarsOffset();
inline js::Value getFlatClosureUpvar(uint32 i) const;
inline void setFlatClosureUpvar(uint32 i, const js::Value &v);
inline void initFlatClosureUpvar(uint32 i, const js::Value &v);
private:
inline bool hasFlatClosureUpvars() const;
inline js::HeapValue *getFlatClosureUpvars() const;
public:
/* See comments in fun_finalize. */
inline void finalizeUpvars();
/* Slot holding associated method property, needed for foo.caller handling. */
static const uint32 METHOD_PROPERTY_SLOT = 0;
/* For cloned methods, slot holding the object this was cloned as a property from. */
static const uint32 METHOD_OBJECT_SLOT = 1;
/* Whether this is a function cloned from a method. */
inline bool isClonedMethod() const;
/* For a cloned method, pointer to the object the method was cloned for. */
inline JSObject *methodObj() const;
inline void setMethodObj(JSObject& obj);
/*
* Method name imputed from property uniquely assigned to or initialized,
* where the function does not need to be cloned to carry a scope chain or
* flattened upvars. This is set on both the original and cloned function.
*/
inline JSAtom *methodAtom() const;
inline void setMethodAtom(JSAtom *atom);
};
inline JSFunction *
JSObject::getFunctionPrivate() const
JSObject::toFunction()
{
JS_ASSERT(isFunction());
return reinterpret_cast<JSFunction *>(getPrivate());
JS_ASSERT(JS_ObjectIsFunction(NULL, this));
return static_cast<JSFunction *>(this);
}
namespace js {
struct FlatClosureData {
HeapValue upvars[1];
};
static JS_ALWAYS_INLINE bool
IsFunctionObject(const js::Value &v)
inline const JSFunction *
JSObject::toFunction() const
{
return v.isObject() && v.toObject().isFunction();
JS_ASSERT(JS_ObjectIsFunction(NULL, const_cast<JSObject *>(this)));
return static_cast<const JSFunction *>(this);
}
static JS_ALWAYS_INLINE bool
IsFunctionObject(const js::Value &v, JSObject **funobj)
{
return v.isObject() && (*funobj = &v.toObject())->isFunction();
}
static JS_ALWAYS_INLINE bool
IsFunctionObject(const js::Value &v, JSObject **funobj, JSFunction **fun)
{
bool b = IsFunctionObject(v, funobj);
if (b)
*fun = (*funobj)->getFunctionPrivate();
return b;
}
static JS_ALWAYS_INLINE bool
IsFunctionObject(const js::Value &v, JSFunction **fun)
{
JSObject *funobj;
return IsFunctionObject(v, &funobj, fun);
}
static JS_ALWAYS_INLINE bool
IsNativeFunction(const js::Value &v)
{
JSFunction *fun;
return IsFunctionObject(v, &fun) && fun->isNative();
}
static JS_ALWAYS_INLINE bool
IsNativeFunction(const js::Value &v, JSFunction **fun)
{
return IsFunctionObject(v, fun) && (*fun)->isNative();
}
static JS_ALWAYS_INLINE bool
IsNativeFunction(const js::Value &v, JSNative native)
{
JSFunction *fun;
return IsFunctionObject(v, &fun) && fun->maybeNative() == native;
}
/*
* When we have an object of a builtin class, we don't quite know what its
* valueOf/toString methods are, since these methods may have been overwritten
* or shadowed. However, we can still do better than the general case by
* hard-coding the necessary properties for us to find the native we expect.
*
* TODO: a per-thread shape-based cache would be faster and simpler.
*/
static JS_ALWAYS_INLINE bool
ClassMethodIsNative(JSContext *cx, JSObject *obj, Class *clasp, jsid methodid, JSNative native)
{
JS_ASSERT(obj->getClass() == clasp);
Value v;
if (!HasDataProperty(cx, obj, methodid, &v)) {
JSObject *proto = obj->getProto();
if (!proto || proto->getClass() != clasp || !HasDataProperty(cx, proto, methodid, &v))
return false;
}
return js::IsNativeFunction(v, native);
}
extern JS_ALWAYS_INLINE bool
SameTraceType(const Value &lhs, const Value &rhs)
{
return SameType(lhs, rhs) &&
(lhs.isPrimitive() ||
lhs.toObject().isFunction() == rhs.toObject().isFunction());
}
/*
* Return true if this is a compiler-created internal function accessed by
* its own object. Such a function object must not be accessible to script
* or embedding code.
*/
inline bool
IsInternalFunctionObject(JSObject *funobj)
{
JS_ASSERT(funobj->isFunction());
JSFunction *fun = (JSFunction *) funobj->getPrivate();
return funobj == fun && (fun->flags & JSFUN_LAMBDA) && !funobj->getParent();
}
/* Valueified JS_IsConstructing. */
static JS_ALWAYS_INLINE bool
IsConstructing(const Value *vp)
{
#ifdef DEBUG
JSObject *callee = &JS_CALLEE(cx, vp).toObject();
if (callee->isFunction()) {
JSFunction *fun = callee->getFunctionPrivate();
JS_ASSERT((fun->flags & JSFUN_CONSTRUCTOR) != 0);
} else {
JS_ASSERT(callee->getClass()->construct != NULL);
}
#endif
return vp[1].isMagic();
}
inline bool
IsConstructing(CallReceiver call);
static JS_ALWAYS_INLINE bool
IsConstructing_PossiblyWithGivenThisObject(const Value *vp, JSObject **ctorThis)
{
#ifdef DEBUG
JSObject *callee = &JS_CALLEE(cx, vp).toObject();
if (callee->isFunction()) {
JSFunction *fun = callee->getFunctionPrivate();
JS_ASSERT((fun->flags & JSFUN_CONSTRUCTOR) != 0);
} else {
JS_ASSERT(callee->getClass()->construct != NULL);
}
#endif
bool isCtor = vp[1].isMagic();
if (isCtor)
*ctorThis = vp[1].getMagicObjectOrNullPayload();
return isCtor;
}
inline const char *
GetFunctionNameBytes(JSContext *cx, JSFunction *fun, JSAutoByteString *bytes)
{
if (fun->atom)
return bytes->encode(cx, fun->atom);
return js_anonymous_str;
}
extern JSFunctionSpec function_methods[];
extern JSBool
Function(JSContext *cx, uintN argc, Value *vp);
extern bool
IsBuiltinFunctionConstructor(JSFunction *fun);
/*
* Preconditions: funobj->isInterpreted() && !funobj->isFunctionPrototype() &&
* !funobj->isBoundFunction(). This is sufficient to establish that funobj has
* a non-configurable non-method .prototype data property, thought it might not
* have been resolved yet, and its value could be anything.
*
* Return the shape of the .prototype property of funobj, resolving it if
* needed. On error, return NULL.
*
* This is not safe to call on trace because it defines properties, which can
* trigger lookups that could reenter.
*/
const Shape *
LookupInterpretedFunctionPrototype(JSContext *cx, JSObject *funobj);
} /* namespace js */
extern JSString *
fun_toStringHelper(JSContext *cx, JSObject *obj, uintN indent);
extern JSFunction *
js_NewFunction(JSContext *cx, JSObject *funobj, JSNative native, uintN nargs,
uintN flags, JSObject *parent, JSAtom *atom);
uintN flags, JSObject *parent, JSAtom *atom,
js::gc::AllocKind kind = JSFunction::FinalizeKind);
extern void
js_FinalizeFunction(JSContext *cx, JSFunction *fun);
extern JSObject * JS_FASTCALL
js_CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent,
JSObject *proto);
extern JSFunction * JS_FASTCALL
js_CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent, JSObject *proto,
js::gc::AllocKind kind = JSFunction::FinalizeKind);
inline JSObject *
CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent,
bool ignoreSingletonClone = false);
inline JSObject *
CloneFunctionObject(JSContext *cx, JSFunction *fun)
{
/*
* Variant which makes an exact clone of fun, preserving parent and proto.
* Calling the above version CloneFunctionObject(cx, fun, fun->getParent())
* is not equivalent: API clients, including XPConnect, can reparent
* objects so that fun->getGlobal() != fun->getProto()->getGlobal().
* See ReparentWrapperIfFound.
*/
JS_ASSERT(fun->getParent() && fun->getProto());
if (fun->hasSingletonType())
return fun;
return js_CloneFunctionObject(cx, fun, fun->getParent(), fun->getProto());
}
extern JSObject * JS_FASTCALL
extern JSFunction * JS_FASTCALL
js_AllocFlatClosure(JSContext *cx, JSFunction *fun, JSObject *scopeChain);
extern JSObject *
extern JSFunction *
js_NewFlatClosure(JSContext *cx, JSFunction *fun, JSOp op, size_t oplen);
extern JSFunction *
js_DefineFunction(JSContext *cx, JSObject *obj, jsid id, JSNative native,
uintN nargs, uintN flags);
uintN nargs, uintN flags,
js::gc::AllocKind kind = JSFunction::FinalizeKind);
/*
* Flags for js_ValueToFunction and js_ReportIsNotFunction.
@ -466,9 +339,6 @@ js_DefineFunction(JSContext *cx, JSObject *obj, jsid id, JSNative native,
extern JSFunction *
js_ValueToFunction(JSContext *cx, const js::Value *vp, uintN flags);
extern JSObject *
js_ValueToFunctionObject(JSContext *cx, js::Value *vp, uintN flags);
extern JSObject *
js_ValueToCallableObject(JSContext *cx, js::Value *vp, uintN flags);
@ -511,8 +381,35 @@ SetCallVar(JSContext *cx, JSObject *obj, jsid id, JSBool strict, js::Value *vp);
extern JSBool
SetCallUpvar(JSContext *cx, JSObject *obj, jsid id, JSBool strict, js::Value *vp);
/*
* Function extended with reserved slots for use by various kinds of functions.
* Most functions do not have these extensions, but enough are that efficient
* storage is required (no malloc'ed reserved slots).
*/
class FunctionExtended : public JSFunction
{
friend struct JSFunction;
/* Reserved slots available for storage by particular native functions. */
HeapValue extendedSlots[2];
};
} // namespace js
inline js::FunctionExtended *
JSFunction::toExtended()
{
JS_ASSERT(isExtended());
return static_cast<js::FunctionExtended *>(this);
}
inline const js::FunctionExtended *
JSFunction::toExtended() const
{
JS_ASSERT(isExtended());
return static_cast<const js::FunctionExtended *>(this);
}
extern JSBool
js_GetArgsValue(JSContext *cx, js::StackFrame *fp, js::Value *vp);

Просмотреть файл

@ -45,58 +45,363 @@
#include "vm/GlobalObject.h"
inline bool
js::IsConstructing(CallReceiver call)
{
return IsConstructing(call.base());
}
inline bool
JSFunction::inStrictMode() const
{
return script()->strictModeCode;
}
inline JSObject *
JSFunction::environment() const
{
JS_ASSERT(isInterpreted());
return u.i.env;
}
inline void
JSFunction::setEnvironment(JSObject *obj)
{
JS_ASSERT(isInterpreted());
u.i.env = obj;
}
inline void
JSFunction::initializeExtended()
{
JS_ASSERT(isExtended());
JS_ASSERT(js::ArrayLength(toExtended()->extendedSlots) == 2);
toExtended()->extendedSlots[0].init(js::UndefinedValue());
toExtended()->extendedSlots[1].init(js::UndefinedValue());
}
inline void
JSFunction::setJoinable()
{
JS_ASSERT(isInterpreted());
setSlot(METHOD_ATOM_SLOT, js::NullValue());
flags |= JSFUN_JOINABLE;
}
inline bool
JSFunction::isClonedMethod() const
{
return joinable() && isExtended() && getExtendedSlot(METHOD_OBJECT_SLOT).isObject();
}
inline JSAtom *
JSFunction::methodAtom() const
{
return (joinable() && isExtended() && getExtendedSlot(METHOD_PROPERTY_SLOT).isString())
? (JSAtom *) getExtendedSlot(METHOD_PROPERTY_SLOT).toString()
: NULL;
}
inline void
JSFunction::setMethodAtom(JSAtom *atom)
{
JS_ASSERT(joinable());
setSlot(METHOD_ATOM_SLOT, js::StringValue(atom));
setExtendedSlot(METHOD_PROPERTY_SLOT, js::StringValue(atom));
}
inline JSObject *
JSFunction::methodObj() const
{
JS_ASSERT(joinable());
return isClonedMethod() ? &getExtendedSlot(METHOD_OBJECT_SLOT).toObject() : NULL;
}
inline void
JSFunction::setMethodObj(JSObject& obj)
{
JS_ASSERT(joinable());
setExtendedSlot(METHOD_OBJECT_SLOT, js::ObjectValue(obj));
}
inline void
JSFunction::setExtendedSlot(size_t which, const js::Value &val)
{
JS_ASSERT(which < js::ArrayLength(toExtended()->extendedSlots));
toExtended()->extendedSlots[which] = val;
}
inline const js::Value &
JSFunction::getExtendedSlot(size_t which) const
{
JS_ASSERT(which < js::ArrayLength(toExtended()->extendedSlots));
return toExtended()->extendedSlots[which];
}
inline bool
JSFunction::hasFlatClosureUpvars() const
{
JS_ASSERT(isFlatClosure());
return isExtended() && !getExtendedSlot(FLAT_CLOSURE_UPVARS_SLOT).isUndefined();
}
inline js::HeapValue *
JSFunction::getFlatClosureUpvars() const
{
JS_ASSERT(hasFlatClosureUpvars());
return (js::HeapValue *) getExtendedSlot(FLAT_CLOSURE_UPVARS_SLOT).toPrivate();
}
inline void
JSFunction::finalizeUpvars()
{
/*
* Cloned function objects may be flat closures with upvars to free.
*
* We must not access JSScript here that is stored in JSFunction. The
* script can be finalized before the function or closure instances. So we
* just check if JSSLOT_FLAT_CLOSURE_UPVARS holds a private value encoded
* as a double. We must also ignore newborn closures that do not have the
* private pointer set.
*
* FIXME bug 648320 - allocate upvars on the GC heap to avoid doing it
* here explicitly.
*/
if (hasFlatClosureUpvars()) {
js::HeapValue *upvars = getFlatClosureUpvars();
js::Foreground::free_(upvars);
}
}
inline js::Value
JSFunction::getFlatClosureUpvar(uint32 i) const
{
JS_ASSERT(hasFlatClosureUpvars());
JS_ASSERT(script()->bindings.countUpvars() == script()->upvars()->length);
JS_ASSERT(i < script()->bindings.countUpvars());
return getFlatClosureUpvars()[i];
}
inline void
JSFunction::setFlatClosureUpvar(uint32 i, const js::Value &v)
{
JS_ASSERT(isFlatClosure());
JS_ASSERT(script()->bindings.countUpvars() == script()->upvars()->length);
JS_ASSERT(i < script()->bindings.countUpvars());
getFlatClosureUpvars()[i] = v;
}
inline void
JSFunction::initFlatClosureUpvar(uint32 i, const js::Value &v)
{
JS_ASSERT(isFlatClosure());
JS_ASSERT(script()->bindings.countUpvars() == script()->upvars()->length);
JS_ASSERT(i < script()->bindings.countUpvars());
getFlatClosureUpvars()[i].init(v);
}
/* static */ inline size_t
JSFunction::getFlatClosureUpvarsOffset()
{
return offsetof(js::FunctionExtended, extendedSlots[FLAT_CLOSURE_UPVARS_SLOT]);
}
namespace js {
static JS_ALWAYS_INLINE bool
IsFunctionObject(const js::Value &v)
{
return v.isObject() && v.toObject().isFunction();
}
static JS_ALWAYS_INLINE bool
IsFunctionObject(const js::Value &v, JSFunction **fun)
{
if (v.isObject() && v.toObject().isFunction()) {
*fun = v.toObject().toFunction();
return true;
}
return false;
}
static JS_ALWAYS_INLINE bool
IsNativeFunction(const js::Value &v)
{
JSFunction *fun;
return IsFunctionObject(v, &fun) && fun->isNative();
}
static JS_ALWAYS_INLINE bool
IsNativeFunction(const js::Value &v, JSFunction **fun)
{
return IsFunctionObject(v, fun) && (*fun)->isNative();
}
static JS_ALWAYS_INLINE bool
IsNativeFunction(const js::Value &v, JSNative native)
{
JSFunction *fun;
return IsFunctionObject(v, &fun) && fun->maybeNative() == native;
}
/*
* When we have an object of a builtin class, we don't quite know what its
* valueOf/toString methods are, since these methods may have been overwritten
* or shadowed. However, we can still do better than the general case by
* hard-coding the necessary properties for us to find the native we expect.
*
* TODO: a per-thread shape-based cache would be faster and simpler.
*/
static JS_ALWAYS_INLINE bool
ClassMethodIsNative(JSContext *cx, JSObject *obj, Class *clasp, jsid methodid, JSNative native)
{
JS_ASSERT(obj->getClass() == clasp);
Value v;
if (!HasDataProperty(cx, obj, methodid, &v)) {
JSObject *proto = obj->getProto();
if (!proto || proto->getClass() != clasp || !HasDataProperty(cx, proto, methodid, &v))
return false;
}
return js::IsNativeFunction(v, native);
}
extern JS_ALWAYS_INLINE bool
SameTraceType(const Value &lhs, const Value &rhs)
{
return SameType(lhs, rhs) &&
(lhs.isPrimitive() ||
lhs.toObject().isFunction() == rhs.toObject().isFunction());
}
/* Valueified JS_IsConstructing. */
static JS_ALWAYS_INLINE bool
IsConstructing(const Value *vp)
{
#ifdef DEBUG
JSObject *callee = &JS_CALLEE(cx, vp).toObject();
if (callee->isFunction()) {
JSFunction *fun = callee->toFunction();
JS_ASSERT((fun->flags & JSFUN_CONSTRUCTOR) != 0);
} else {
JS_ASSERT(callee->getClass()->construct != NULL);
}
#endif
return vp[1].isMagic();
}
inline bool
IsConstructing(CallReceiver call)
{
return IsConstructing(call.base());
}
static JS_ALWAYS_INLINE bool
IsConstructing_PossiblyWithGivenThisObject(const Value *vp, JSObject **ctorThis)
{
#ifdef DEBUG
JSObject *callee = &JS_CALLEE(cx, vp).toObject();
if (callee->isFunction()) {
JSFunction *fun = callee->toFunction();
JS_ASSERT((fun->flags & JSFUN_CONSTRUCTOR) != 0);
} else {
JS_ASSERT(callee->getClass()->construct != NULL);
}
#endif
bool isCtor = vp[1].isMagic();
if (isCtor)
*ctorThis = vp[1].getMagicObjectOrNullPayload();
return isCtor;
}
inline const char *
GetFunctionNameBytes(JSContext *cx, JSFunction *fun, JSAutoByteString *bytes)
{
if (fun->atom)
return bytes->encode(cx, fun->atom);
return js_anonymous_str;
}
extern JSFunctionSpec function_methods[];
extern JSBool
Function(JSContext *cx, uintN argc, Value *vp);
extern bool
IsBuiltinFunctionConstructor(JSFunction *fun);
/*
* Preconditions: funobj->isInterpreted() && !funobj->isFunctionPrototype() &&
* !funobj->isBoundFunction(). This is sufficient to establish that funobj has
* a non-configurable non-method .prototype data property, thought it might not
* have been resolved yet, and its value could be anything.
*
* Return the shape of the .prototype property of funobj, resolving it if
* needed. On error, return NULL.
*
* This is not safe to call on trace because it defines properties, which can
* trigger lookups that could reenter.
*/
const Shape *
LookupInterpretedFunctionPrototype(JSContext *cx, JSObject *funobj);
static inline JSObject *
SkipScopeParent(JSObject *parent)
{
if (!parent)
return NULL;
while (parent->isInternalScope())
parent = parent->scopeChain();
return parent;
}
inline JSFunction *
CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent,
bool ignoreSingletonClone /* = false */)
gc::AllocKind kind = JSFunction::FinalizeKind)
{
JS_ASSERT(parent);
JSObject *proto = parent->getGlobal()->getOrCreateFunctionPrototype(cx);
if (!proto)
return NULL;
return js_CloneFunctionObject(cx, fun, parent, proto, kind);
}
inline JSFunction *
CloneFunctionObjectIfNotSingleton(JSContext *cx, JSFunction *fun, JSObject *parent)
{
/*
* For attempts to clone functions at a function definition opcode or from
* a method barrier, don't perform the clone if the function has singleton
* type. CloneFunctionObject was called pessimistically, and we need to
* preserve the type's property that if it is singleton there is only a
* single object with its type in existence.
* type. This was called pessimistically, and we need to preserve the
* type's property that if it is singleton there is only a single object
* with its type in existence.
*/
if (ignoreSingletonClone && fun->hasSingletonType()) {
JS_ASSERT(fun->getProto() == proto);
fun->setParent(parent);
if (fun->hasSingletonType()) {
if (!fun->setParent(cx, SkipScopeParent(parent)))
return NULL;
fun->setEnvironment(parent);
return fun;
}
return js_CloneFunctionObject(cx, fun, parent, proto);
return CloneFunctionObject(cx, fun, parent);
}
inline JSFunction *
CloneFunctionObject(JSContext *cx, JSFunction *fun)
{
/*
* Variant which makes an exact clone of fun, preserving parent and proto.
* Calling the above version CloneFunctionObject(cx, fun, fun->getParent())
* is not equivalent: API clients, including XPConnect, can reparent
* objects so that fun->getGlobal() != fun->getProto()->getGlobal().
* See ReparentWrapperIfFound.
*/
JS_ASSERT(fun->getParent() && fun->getProto());
if (fun->hasSingletonType())
return fun;
return js_CloneFunctionObject(cx, fun, fun->environment(), fun->getProto(),
JSFunction::ExtendedFinalizeKind);
}
} /* namespace js */
inline void
JSFunction::setScript(JSScript *script_)
{

Просмотреть файл

@ -143,9 +143,9 @@ const uint32 Arena::ThingSizes[] = {
sizeof(JSObject_Slots12), /* FINALIZE_OBJECT12_BACKGROUND */
sizeof(JSObject_Slots16), /* FINALIZE_OBJECT16 */
sizeof(JSObject_Slots16), /* FINALIZE_OBJECT16_BACKGROUND */
sizeof(JSFunction), /* FINALIZE_FUNCTION */
sizeof(JSScript), /* FINALIZE_SCRIPT */
sizeof(Shape), /* FINALIZE_SHAPE */
sizeof(BaseShape), /* FINALIZE_BASE_SHAPE */
sizeof(types::TypeObject), /* FINALIZE_TYPE_OBJECT */
#if JS_HAS_XML_SUPPORT
sizeof(JSXML), /* FINALIZE_XML */
@ -170,9 +170,9 @@ const uint32 Arena::FirstThingOffsets[] = {
OFFSET(JSObject_Slots12), /* FINALIZE_OBJECT12_BACKGROUND */
OFFSET(JSObject_Slots16), /* FINALIZE_OBJECT16 */
OFFSET(JSObject_Slots16), /* FINALIZE_OBJECT16_BACKGROUND */
OFFSET(JSFunction), /* FINALIZE_FUNCTION */
OFFSET(JSScript), /* FINALIZE_SCRIPT */
OFFSET(Shape), /* FINALIZE_SHAPE */
OFFSET(BaseShape), /* FINALIZE_BASE_SHAPE */
OFFSET(types::TypeObject), /* FINALIZE_TYPE_OBJECT */
#if JS_HAS_XML_SUPPORT
OFFSET(JSXML), /* FINALIZE_XML */
@ -258,7 +258,7 @@ Arena::staticAsserts()
template<typename T>
inline bool
Arena::finalize(JSContext *cx, AllocKind thingKind, size_t thingSize)
Arena::finalize(JSContext *cx, AllocKind thingKind, size_t thingSize, bool background)
{
/* Enforce requirements on size of T. */
JS_ASSERT(thingSize % Cell::CellSize == 0);
@ -307,7 +307,7 @@ Arena::finalize(JSContext *cx, AllocKind thingKind, size_t thingSize)
} else {
if (!newFreeSpanStart)
newFreeSpanStart = thing;
t->finalize(cx);
t->finalize(cx, background);
JS_POISON(t, JS_FREE_PATTERN, thingSize);
}
}
@ -342,7 +342,7 @@ Arena::finalize(JSContext *cx, AllocKind thingKind, size_t thingSize)
template<typename T>
inline void
FinalizeTypedArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind)
FinalizeTypedArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind, bool background)
{
/*
* Release empty arenas and move non-full arenas with some free things into
@ -354,7 +354,7 @@ FinalizeTypedArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKin
ArenaHeader **ap = &al->head;
size_t thingSize = Arena::thingSize(thingKind);
while (ArenaHeader *aheader = *ap) {
bool allClear = aheader->getArena()->finalize<T>(cx, thingKind, thingSize);
bool allClear = aheader->getArena()->finalize<T>(cx, thingKind, thingSize, background);
if (allClear) {
*ap = aheader->next;
aheader->chunk()->releaseArena(aheader);
@ -379,7 +379,7 @@ FinalizeTypedArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKin
* after the al->head.
*/
static void
FinalizeArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind)
FinalizeArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind, bool background)
{
switch(thingKind) {
case FINALIZE_OBJECT0:
@ -394,31 +394,33 @@ FinalizeArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind)
case FINALIZE_OBJECT12_BACKGROUND:
case FINALIZE_OBJECT16:
case FINALIZE_OBJECT16_BACKGROUND:
case FINALIZE_FUNCTION:
FinalizeTypedArenas<JSObject>(cx, al, thingKind);
FinalizeTypedArenas<JSObject>(cx, al, thingKind, background);
break;
case FINALIZE_SCRIPT:
FinalizeTypedArenas<JSScript>(cx, al, thingKind);
FinalizeTypedArenas<JSScript>(cx, al, thingKind, background);
break;
case FINALIZE_SHAPE:
FinalizeTypedArenas<Shape>(cx, al, thingKind);
FinalizeTypedArenas<Shape>(cx, al, thingKind, background);
break;
case FINALIZE_BASE_SHAPE:
FinalizeTypedArenas<BaseShape>(cx, al, thingKind, background);
break;
case FINALIZE_TYPE_OBJECT:
FinalizeTypedArenas<types::TypeObject>(cx, al, thingKind);
FinalizeTypedArenas<types::TypeObject>(cx, al, thingKind, background);
break;
#if JS_HAS_XML_SUPPORT
case FINALIZE_XML:
FinalizeTypedArenas<JSXML>(cx, al, thingKind);
FinalizeTypedArenas<JSXML>(cx, al, thingKind, background);
break;
#endif
case FINALIZE_STRING:
FinalizeTypedArenas<JSString>(cx, al, thingKind);
FinalizeTypedArenas<JSString>(cx, al, thingKind, background);
break;
case FINALIZE_SHORT_STRING:
FinalizeTypedArenas<JSShortString>(cx, al, thingKind);
FinalizeTypedArenas<JSShortString>(cx, al, thingKind, background);
break;
case FINALIZE_EXTERNAL_STRING:
FinalizeTypedArenas<JSExternalString>(cx, al, thingKind);
FinalizeTypedArenas<JSExternalString>(cx, al, thingKind, background);
break;
}
}
@ -1458,7 +1460,7 @@ ArenaLists::finalizeNow(JSContext *cx, AllocKind thingKind)
#ifdef JS_THREADSAFE
JS_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE);
#endif
FinalizeArenas(cx, &arenaLists[thingKind], thingKind);
FinalizeArenas(cx, &arenaLists[thingKind], thingKind, false);
}
inline void
@ -1470,7 +1472,6 @@ ArenaLists::finalizeLater(JSContext *cx, AllocKind thingKind)
thingKind == FINALIZE_OBJECT8_BACKGROUND ||
thingKind == FINALIZE_OBJECT12_BACKGROUND ||
thingKind == FINALIZE_OBJECT16_BACKGROUND ||
thingKind == FINALIZE_FUNCTION ||
thingKind == FINALIZE_SHORT_STRING ||
thingKind == FINALIZE_STRING);
@ -1501,7 +1502,7 @@ ArenaLists::finalizeLater(JSContext *cx, AllocKind thingKind)
al->clear();
backgroundFinalizeState[thingKind] = BFS_RUN;
} else {
FinalizeArenas(cx, al, thingKind);
FinalizeArenas(cx, al, thingKind, false);
backgroundFinalizeState[thingKind] = BFS_DONE;
}
@ -1521,7 +1522,7 @@ ArenaLists::backgroundFinalize(JSContext *cx, ArenaHeader *listHead)
JSCompartment *comp = listHead->compartment;
ArenaList finalized;
finalized.head = listHead;
FinalizeArenas(cx, &finalized, thingKind);
FinalizeArenas(cx, &finalized, thingKind, true);
/*
* After we finish the finalization al->cursor must point to the end of
@ -1574,13 +1575,6 @@ ArenaLists::finalizeObjects(JSContext *cx)
finalizeLater(cx, FINALIZE_OBJECT16_BACKGROUND);
#endif
/*
* We must finalize Function instances after finalizing any other objects
* even if we use the background finalization for the latter. See comments
* in JSObject::finalizeUpvarsIfFlatClosure.
*/
finalizeLater(cx, FINALIZE_FUNCTION);
#if JS_HAS_XML_SUPPORT
finalizeNow(cx, FINALIZE_XML);
#endif
@ -1599,6 +1593,7 @@ void
ArenaLists::finalizeShapes(JSContext *cx)
{
finalizeNow(cx, FINALIZE_SHAPE);
finalizeNow(cx, FINALIZE_BASE_SHAPE);
finalizeNow(cx, FINALIZE_TYPE_OBJECT);
}
@ -2481,18 +2476,6 @@ BeginMarkPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind)
{
JSRuntime *rt = cx->runtime;
/*
* Reset the property cache's type id generator so we can compress ids.
* Same for the protoHazardShape proxy-shape standing in for all object
* prototypes having readonly or setter properties.
*/
if (rt->shapeGen & SHAPE_OVERFLOW_BIT || (rt->gcZeal() && !rt->gcCurrentCompartment)) {
JS_ASSERT(!rt->gcCurrentCompartment);
rt->gcRegenShapes = true;
rt->shapeGen = 0;
rt->protoHazardShape = 0;
}
for (GCCompartmentsIter c(rt); !c.done(); c.next())
c->purge(cx);
@ -2942,7 +2925,6 @@ GCCycle(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind)
#endif
rt->gcMarkAndSweep = false;
rt->gcRegenShapes = false;
rt->setGCLastBytes(rt->gcBytes, gckind);
rt->gcCurrentCompartment = NULL;

Просмотреть файл

@ -559,7 +559,7 @@ struct Arena {
}
template <typename T>
bool finalize(JSContext *cx, AllocKind thingKind, size_t thingSize);
bool finalize(JSContext *cx, AllocKind thingKind, size_t thingSize, bool background);
};
/* The chunk header (located at the end of the chunk to preserve arena alignment). */
@ -952,9 +952,9 @@ MapAllocToTraceKind(AllocKind thingKind)
JSTRACE_OBJECT, /* FINALIZE_OBJECT12_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT16 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT16_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_FUNCTION */
JSTRACE_SCRIPT, /* FINALIZE_SCRIPT */
JSTRACE_SHAPE, /* FINALIZE_SHAPE */
JSTRACE_BASE_SHAPE, /* FINALIZE_BASE_SHAPE */
JSTRACE_TYPE_OBJECT,/* FINALIZE_TYPE_OBJECT */
#if JS_HAS_XML_SUPPORT /* FINALIZE_XML */
JSTRACE_XML,

Просмотреть файл

@ -68,22 +68,44 @@ const size_t SLOTS_TO_THING_KIND_LIMIT = 17;
/* Get the best kind to use when making an object with the given slot count. */
static inline AllocKind
GetGCObjectKind(size_t numSlots, bool isArray = false)
GetGCObjectKind(size_t numSlots)
{
extern AllocKind slotsToThingKind[];
if (numSlots >= SLOTS_TO_THING_KIND_LIMIT) {
/*
* If the object will definitely want more than the maximum number of
* fixed slots, use zero fixed slots for arrays and the maximum for
* other objects. Arrays do not use their fixed slots anymore when
* they have a slots array, while other objects will continue to do so.
*/
return isArray ? FINALIZE_OBJECT0 : FINALIZE_OBJECT16;
}
if (numSlots >= SLOTS_TO_THING_KIND_LIMIT)
return FINALIZE_OBJECT16;
return slotsToThingKind[numSlots];
}
static inline AllocKind
GetGCObjectKind(Class *clasp)
{
if (clasp == &FunctionClass)
return JSFunction::FinalizeKind;
uint32 nslots = JSCLASS_RESERVED_SLOTS(clasp);
if (clasp->flags & JSCLASS_HAS_PRIVATE)
nslots++;
return GetGCObjectKind(nslots);
}
/* As for GetGCObjectKind, but for dense array allocation. */
static inline AllocKind
GetGCArrayKind(size_t numSlots)
{
extern AllocKind slotsToThingKind[];
/*
* Dense arrays can use their fixed slots to hold their elements array
* (less two Values worth of ObjectElements header), but if more than the
* maximum number of fixed slots is needed then the fixed slots will be
* unused.
*/
JS_STATIC_ASSERT(ObjectElements::VALUES_PER_HEADER == 2);
if (numSlots > JSObject::NELEMENTS_LIMIT || numSlots + 2 >= SLOTS_TO_THING_KIND_LIMIT)
return FINALIZE_OBJECT2;
return slotsToThingKind[numSlots + 2];
}
static inline AllocKind
GetGCObjectFixedSlotsKind(size_t numFixedSlots)
{
@ -151,6 +173,27 @@ GetGCKindSlots(AllocKind thingKind)
}
}
static inline size_t
GetGCKindSlots(AllocKind thingKind, Class *clasp)
{
size_t nslots = GetGCKindSlots(thingKind);
/* An object's private data uses the space taken by its last fixed slot. */
if (clasp->flags & JSCLASS_HAS_PRIVATE) {
JS_ASSERT(nslots > 0);
nslots--;
}
/*
* Functions have a larger finalize kind than FINALIZE_OBJECT to reserve
* space for the extra fields in JSFunction, but have no fixed slots.
*/
if (clasp == &FunctionClass)
nslots = 0;
return nslots;
}
static inline void
GCPoke(JSContext *cx, Value oldval)
{
@ -324,9 +367,6 @@ class CellIter: public CellIterImpl
inline void EmptyArenaOp(Arena *arena) {}
inline void EmptyCellOp(Cell *t) {}
} /* namespace gc */
} /* namespace js */
/*
* Allocates a new GC thing. After a successful allocation the caller must
* fully initialize the thing before calling any function that can potentially
@ -358,54 +398,80 @@ NewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
return static_cast<T *>(t);
}
/* Alternate form which allocates a GC thing if doing so cannot trigger a GC. */
template <typename T>
inline T *
TryNewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
{
JS_ASSERT(thingSize == js::gc::Arena::thingSize(kind));
#ifdef JS_THREADSAFE
JS_ASSERT_IF((cx->compartment == cx->runtime->atomsCompartment),
kind == js::gc::FINALIZE_STRING || kind == js::gc::FINALIZE_SHORT_STRING);
#endif
JS_ASSERT(!cx->runtime->gcRunning);
JS_ASSERT(!JS_THREAD_DATA(cx)->noGCOrAllocationCheck);
#ifdef JS_GC_ZEAL
if (cx->runtime->needZealousGC())
return NULL;
#endif
void *t = cx->compartment->arenas.allocateFromFreeList(kind, thingSize);
return static_cast<T *>(t);
}
} /* namespace gc */
} /* namespace js */
inline JSObject *
js_NewGCObject(JSContext *cx, js::gc::AllocKind kind)
{
JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
JSObject *obj = NewGCThing<JSObject>(cx, kind, js::gc::Arena::thingSize(kind));
if (obj)
obj->earlyInit(js::gc::GetGCKindSlots(kind));
return obj;
return js::gc::NewGCThing<JSObject>(cx, kind, js::gc::Arena::thingSize(kind));
}
inline JSObject *
js_TryNewGCObject(JSContext *cx, js::gc::AllocKind kind)
{
JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
return js::gc::TryNewGCThing<JSObject>(cx, kind, js::gc::Arena::thingSize(kind));
}
inline JSString *
js_NewGCString(JSContext *cx)
{
return NewGCThing<JSString>(cx, js::gc::FINALIZE_STRING, sizeof(JSString));
return js::gc::NewGCThing<JSString>(cx, js::gc::FINALIZE_STRING, sizeof(JSString));
}
inline JSShortString *
js_NewGCShortString(JSContext *cx)
{
return NewGCThing<JSShortString>(cx, js::gc::FINALIZE_SHORT_STRING, sizeof(JSShortString));
return js::gc::NewGCThing<JSShortString>(cx, js::gc::FINALIZE_SHORT_STRING, sizeof(JSShortString));
}
inline JSExternalString *
js_NewGCExternalString(JSContext *cx)
{
return NewGCThing<JSExternalString>(cx, js::gc::FINALIZE_EXTERNAL_STRING,
sizeof(JSExternalString));
}
inline JSFunction*
js_NewGCFunction(JSContext *cx)
{
JSFunction *fun = NewGCThing<JSFunction>(cx, js::gc::FINALIZE_FUNCTION, sizeof(JSFunction));
if (fun)
fun->earlyInit(JSObject::FUN_CLASS_RESERVED_SLOTS);
return fun;
return js::gc::NewGCThing<JSExternalString>(cx, js::gc::FINALIZE_EXTERNAL_STRING,
sizeof(JSExternalString));
}
inline JSScript *
js_NewGCScript(JSContext *cx)
{
return NewGCThing<JSScript>(cx, js::gc::FINALIZE_SCRIPT, sizeof(JSScript));
return js::gc::NewGCThing<JSScript>(cx, js::gc::FINALIZE_SCRIPT, sizeof(JSScript));
}
inline js::Shape *
js_NewGCShape(JSContext *cx)
{
return NewGCThing<js::Shape>(cx, js::gc::FINALIZE_SHAPE, sizeof(js::Shape));
return js::gc::NewGCThing<js::Shape>(cx, js::gc::FINALIZE_SHAPE, sizeof(js::Shape));
}
inline js::BaseShape *
js_NewGCBaseShape(JSContext *cx)
{
return js::gc::NewGCThing<js::BaseShape>(cx, js::gc::FINALIZE_BASE_SHAPE, sizeof(js::BaseShape));
}
#if JS_HAS_XML_SUPPORT

Просмотреть файл

@ -230,14 +230,27 @@ MarkShape(JSTracer *trc, const MarkablePtr<const Shape> &shape, const char *name
MarkShapeUnbarriered(trc, shape.value, name);
}
void
MarkBaseShapeUnbarriered(JSTracer *trc, BaseShape *base, const char *name)
{
JS_ASSERT(trc);
JS_ASSERT(base);
JS_SET_TRACING_NAME(trc, name);
Mark(trc, base);
}
void
MarkBaseShape(JSTracer *trc, const MarkablePtr<BaseShape> &base, const char *name)
{
MarkBaseShapeUnbarriered(trc, base.value, name);
}
void
MarkTypeObjectUnbarriered(JSTracer *trc, types::TypeObject *type, const char *name)
{
JS_ASSERT(trc);
JS_ASSERT(type);
JS_SET_TRACING_NAME(trc, name);
if (type == &types::emptyTypeObject)
return;
Mark(trc, type);
/*
@ -247,7 +260,7 @@ MarkTypeObjectUnbarriered(JSTracer *trc, types::TypeObject *type, const char *na
* members, and we don't need to handle them here.
*/
if (IS_GC_MARKING_TRACER(trc)) {
if (type->singleton)
if (type->singleton && !type->lazy())
MarkObject(trc, type->singleton, "type_singleton");
if (type->interpretedFunction)
MarkObject(trc, type->interpretedFunction, "type_function");
@ -349,6 +362,20 @@ PushMarkStack(GCMarker *gcmarker, const Shape *thing)
ScanShape(gcmarker, thing);
}
static void
ScanBaseShape(GCMarker *gcmarker, BaseShape *base);
void
PushMarkStack(GCMarker *gcmarker, BaseShape *thing)
{
JS_OPT_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
thing->compartment() == gcmarker->context->runtime->gcCurrentCompartment);
/* We mark base shapes directly rather than pushing on the stack. */
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
ScanBaseShape(gcmarker, thing);
}
static void
MarkAtomRange(JSTracer *trc, size_t len, JSAtom **vec, const char *name)
{
@ -447,6 +474,9 @@ MarkKind(JSTracer *trc, void *thing, JSGCTraceKind kind)
case JSTRACE_SHAPE:
Mark(trc, reinterpret_cast<Shape *>(thing));
break;
case JSTRACE_BASE_SHAPE:
Mark(trc, reinterpret_cast<BaseShape *>(thing));
break;
case JSTRACE_TYPE_OBJECT:
MarkTypeObjectUnbarriered(trc, reinterpret_cast<types::TypeObject *>(thing), "type_stack");
break;
@ -665,17 +695,19 @@ PrintPropertyGetterOrSetter(JSTracer *trc, char *buf, size_t bufsize)
{
JS_ASSERT(trc->debugPrinter == PrintPropertyGetterOrSetter);
Shape *shape = (Shape *)trc->debugPrintArg;
PrintPropertyId(buf, bufsize, shape->propid,
PrintPropertyId(buf, bufsize, shape->propid(),
trc->debugPrintIndex ? js_setter_str : js_getter_str);
}
#ifdef DEBUG
static void
PrintPropertyMethod(JSTracer *trc, char *buf, size_t bufsize)
{
JS_ASSERT(trc->debugPrinter == PrintPropertyMethod);
Shape *shape = (Shape *)trc->debugPrintArg;
PrintPropertyId(buf, bufsize, shape->propid, " method");
PrintPropertyId(buf, bufsize, shape->propid(), " method");
}
#endif /* DEBUG */
static inline void
ScanValue(GCMarker *gcmarker, const Value &v)
@ -695,28 +727,35 @@ static void
ScanShape(GCMarker *gcmarker, const Shape *shape)
{
restart:
JSRuntime *rt = gcmarker->runtime;
if (rt->gcRegenShapes)
shape->shapeid = js_RegenerateShapeForGC(rt);
PushMarkStack(gcmarker, shape->base());
if (JSID_IS_STRING(shape->propid))
PushMarkStack(gcmarker, JSID_TO_STRING(shape->propid));
else if (JS_UNLIKELY(JSID_IS_OBJECT(shape->propid)))
PushMarkStack(gcmarker, JSID_TO_OBJECT(shape->propid));
if (shape->hasGetterValue() && shape->getter())
PushMarkStack(gcmarker, shape->getterObject());
if (shape->hasSetterValue() && shape->setter())
PushMarkStack(gcmarker, shape->setterObject());
if (shape->isMethod())
PushMarkStack(gcmarker, &shape->methodObject());
jsid id = shape->maybePropid();
if (JSID_IS_STRING(id))
PushMarkStack(gcmarker, JSID_TO_STRING(id));
else if (JS_UNLIKELY(JSID_IS_OBJECT(id)))
PushMarkStack(gcmarker, JSID_TO_OBJECT(id));
shape = shape->previous();
if (shape && shape->markIfUnmarked(gcmarker->getMarkColor()))
goto restart;
}
static void
ScanBaseShape(GCMarker *gcmarker, BaseShape *base)
{
if (base->hasGetterObject())
PushMarkStack(gcmarker, base->getterObject());
if (base->hasSetterObject())
PushMarkStack(gcmarker, base->setterObject());
if (base->isOwned())
PushMarkStack(gcmarker, base->baseUnowned());
if (JSObject *parent = base->getObjectParent())
PushMarkStack(gcmarker, parent);
}
static inline void
ScanRope(GCMarker *gcmarker, JSRope *rope)
{
@ -762,21 +801,14 @@ static const uintN LARGE_OBJECT_CHUNK_SIZE = 2048;
static void
ScanObject(GCMarker *gcmarker, JSObject *obj)
{
if (obj->isNewborn())
return;
types::TypeObject *type = obj->typeFromGC();
if (type != &types::emptyTypeObject)
PushMarkStack(gcmarker, type);
PushMarkStack(gcmarker, type);
if (JSObject *parent = obj->getParent())
PushMarkStack(gcmarker, parent);
js::Shape *shape = obj->lastProperty();
PushMarkStack(gcmarker, shape);
/*
* Call the trace hook if necessary, and check for a newType on objects
* which are not dense arrays (dense arrays have trace hooks).
*/
Class *clasp = obj->getClass();
/* Call the trace hook if necessary. */
Class *clasp = shape->getObjectClass();
if (clasp->trace) {
if (clasp == &ArrayClass) {
if (obj->getDenseArrayInitializedLength() > LARGE_OBJECT_CHUNK_SIZE) {
@ -786,31 +818,12 @@ ScanObject(GCMarker *gcmarker, JSObject *obj)
clasp->trace(gcmarker, obj);
}
} else {
if (obj->newType)
PushMarkStack(gcmarker, obj->newType);
clasp->trace(gcmarker, obj);
}
} else {
if (obj->newType)
PushMarkStack(gcmarker, obj->newType);
}
if (obj->isNative()) {
js::Shape *shape = obj->lastProp;
PushMarkStack(gcmarker, shape);
if (gcmarker->runtime->gcRegenShapes) {
/* We need to regenerate our shape if hasOwnShape(). */
uint32 newShape = shape->shapeid;
if (obj->hasOwnShape()) {
newShape = js_RegenerateShapeForGC(gcmarker->runtime);
JS_ASSERT(newShape != shape->shapeid);
}
obj->objShape = newShape;
}
if (shape->isNative()) {
uint32 nslots = obj->slotSpan();
JS_ASSERT(obj->slotSpan() <= obj->numSlots());
if (nslots > LARGE_OBJECT_CHUNK_SIZE) {
if (gcmarker->largeStack.push(LargeMarkItem(obj)))
return;
@ -851,26 +864,16 @@ ScanLargeObject(GCMarker *gcmarker, LargeMarkItem &item)
void
MarkChildren(JSTracer *trc, JSObject *obj)
{
/* If obj has no map, it must be a newborn. */
if (obj->isNewborn())
return;
MarkTypeObject(trc, obj->typeFromGC(), "type");
/* Trace universal (ops-independent) members. */
if (!obj->isDenseArray() && obj->newType)
MarkTypeObject(trc, obj->newType, "new_type");
if (obj->parent)
MarkObject(trc, obj->parent, "parent");
Shape *shape = obj->lastProperty();
MarkShapeUnbarriered(trc, shape, "shape");
Class *clasp = obj->getClass();
Class *clasp = shape->getObjectClass();
if (clasp->trace)
clasp->trace(trc, obj);
if (obj->isNative()) {
MarkShape(trc, obj->lastProp, "shape");
JS_ASSERT(obj->slotSpan() <= obj->numSlots());
if (shape->isNative()) {
uint32 nslots = obj->slotSpan();
for (uint32 i = 0; i < nslots; i++) {
JS_SET_TRACING_DETAILS(trc, js_PrintObjectSlotName, obj, i);
@ -921,6 +924,9 @@ MarkChildren(JSTracer *trc, JSScript *script)
MarkValueRange(trc, constarray->length, constarray->vector, "consts");
}
if (script->function())
MarkObjectUnbarriered(trc, script->function(), "function");
if (!script->isCachedEval && script->globalObject)
MarkObject(trc, script->globalObject, "object");
@ -937,24 +943,34 @@ void
MarkChildren(JSTracer *trc, const Shape *shape)
{
restart:
MarkId(trc, shape->propid, "propid");
if (shape->hasGetterValue() && shape->getter())
MarkObjectWithPrinterUnbarriered(trc, shape->getterObject(),
PrintPropertyGetterOrSetter, shape, 0);
if (shape->hasSetterValue() && shape->setter())
MarkObjectWithPrinterUnbarriered(trc, shape->setterObject(),
PrintPropertyGetterOrSetter, shape, 1);
if (shape->isMethod())
MarkObjectWithPrinterUnbarriered(trc, &shape->methodObject(),
PrintPropertyMethod, shape, 0);
MarkBaseShapeUnbarriered(trc, shape->base(), "base");
MarkIdUnbarriered(trc, shape->maybePropid(), "propid");
shape = shape->previous();
if (shape)
goto restart;
}
void
MarkChildren(JSTracer *trc, BaseShape *base)
{
if (base->hasGetterObject()) {
MarkObjectWithPrinterUnbarriered(trc, base->getterObject(),
PrintPropertyGetterOrSetter, base, 0);
}
if (base->hasSetterObject()) {
MarkObjectWithPrinterUnbarriered(trc, base->setterObject(),
PrintPropertyGetterOrSetter, base, 0);
}
if (base->isOwned())
MarkBaseShapeUnbarriered(trc, base->baseUnowned(), "base");
if (JSObject *parent = base->getObjectParent())
MarkObjectUnbarriered(trc, parent, "parent");
}
static void
ScanTypeObject(GCMarker *gcmarker, types::TypeObject *type)
{
@ -967,13 +983,6 @@ ScanTypeObject(GCMarker *gcmarker, types::TypeObject *type)
}
}
if (type->emptyShapes) {
for (unsigned i = 0; i < FINALIZE_OBJECT_LIMIT; i++) {
if (type->emptyShapes[i])
PushMarkStack(gcmarker, type->emptyShapes[i]);
}
}
if (type->proto)
PushMarkStack(gcmarker, type->proto);
@ -982,12 +991,15 @@ ScanTypeObject(GCMarker *gcmarker, types::TypeObject *type)
PushMarkStack(gcmarker, type->newScript->shape);
}
if (type->interpretedFunction)
PushMarkStack(gcmarker, type->interpretedFunction);
/*
* Don't need to trace singleton or functionScript, an object with this
* type must have already been traced and it will also hold a reference
* on the script (singleton and functionScript types cannot be the newType
* of another object). Attempts to mark type objects directly must use
* MarkTypeObject, which will itself mark these extra bits.
* Don't need to trace singleton, an object with this type must have
* already been traced and it will also hold a reference on the script
* (singleton and functionScript types cannot be the newType of another
* object). Attempts to mark type objects directly must use MarkTypeObject,
* which will itself mark these extra bits.
*/
}
@ -1003,17 +1015,10 @@ MarkChildren(JSTracer *trc, types::TypeObject *type)
}
}
if (type->emptyShapes) {
for (unsigned i = 0; i < FINALIZE_OBJECT_LIMIT; i++) {
if (type->emptyShapes[i])
MarkShape(trc, type->emptyShapes[i], "empty_shape");
}
}
if (type->proto)
MarkObject(trc, type->proto, "type_proto");
if (type->singleton)
if (type->singleton && !type->lazy())
MarkObject(trc, type->singleton, "type_singleton");
if (type->newScript) {
@ -1092,6 +1097,10 @@ TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind)
MarkChildren(trc, static_cast<Shape *>(thing));
break;
case JSTRACE_BASE_SHAPE:
MarkChildren(trc, static_cast<BaseShape *>(thing));
break;
case JSTRACE_TYPE_OBJECT:
MarkChildren(trc, (types::TypeObject *)thing);
break;
@ -1120,7 +1129,6 @@ JSObject::scanSlots(GCMarker *gcmarker)
* Scan the fixed slots and the dynamic slots separately, to avoid
* branching inside nativeGetSlot().
*/
JS_ASSERT(slotSpan() <= numSlots());
unsigned i, nslots = slotSpan();
if (slots) {
unsigned nfixed = numFixedSlots();

Просмотреть файл

@ -81,6 +81,9 @@ MarkShapeUnbarriered(JSTracer *trc, const Shape *shape, const char *name);
void
MarkShape(JSTracer *trc, const MarkablePtr<const Shape> &shape, const char *name);
void
MarkBaseShapeUnbarriered(JSTracer *trc, BaseShape *shape, const char *name);
void
MarkTypeObjectUnbarriered(JSTracer *trc, types::TypeObject *type, const char *name);

Просмотреть файл

@ -134,6 +134,10 @@ GCMarker::dumpConservativeRoots()
fprintf(fp, "shape");
break;
}
case JSTRACE_BASE_SHAPE: {
fprintf(fp, "base_shape");
break;
}
case JSTRACE_TYPE_OBJECT: {
fprintf(fp, "type_object");
break;

Просмотреть файл

@ -795,7 +795,7 @@ static inline const Shape *
GetSingletonShape(JSContext *cx, JSObject *obj, jsid id)
{
const Shape *shape = obj->nativeLookup(cx, id);
if (shape && shape->hasDefaultGetterOrIsMethod() && shape->slot != SHAPE_INVALID_SLOT)
if (shape && shape->hasDefaultGetterOrIsMethod() && shape->hasSlot())
return shape;
return NULL;
}
@ -814,7 +814,7 @@ ScriptAnalysis::pruneTypeBarriers(JSContext *cx, uint32 offset)
if (barrier->singleton) {
JS_ASSERT(barrier->type.isPrimitive(JSVAL_TYPE_UNDEFINED));
const Shape *shape = GetSingletonShape(cx, barrier->singleton, barrier->singletonId);
if (shape && !barrier->singleton->nativeGetSlot(shape->slot).isUndefined()) {
if (shape && !barrier->singleton->nativeGetSlot(shape->slot()).isUndefined()) {
/*
* When we analyzed the script the singleton had an 'own'
* property which was undefined (probably a 'var' variable
@ -1046,7 +1046,7 @@ PropertyAccess(JSContext *cx, JSScript *script, jsbytecode *pc, TypeObject *obje
* even if no undefined value is ever observed at pc.
*/
const Shape *shape = GetSingletonShape(cx, object->singleton, id);
if (shape && object->singleton->nativeGetSlot(shape->slot).isUndefined())
if (shape && object->singleton->nativeGetSlot(shape->slot()).isUndefined())
script->analysis()->addSingletonTypeBarrier(cx, pc, target, object->singleton, id);
}
} else {
@ -1164,7 +1164,7 @@ TypeConstraintCall::newType(JSContext *cx, TypeSet *source, Type type)
return;
}
if (obj->getFunctionPrivate()->isNative()) {
if (obj->toFunction()->isNative()) {
/*
* The return value and all side effects within native calls should
* be dynamically monitored, except when the compiler is generating
@ -1180,7 +1180,7 @@ TypeConstraintCall::newType(JSContext *cx, TypeSet *source, Type type)
* which specializes particular natives.
*/
Native native = obj->getFunctionPrivate()->native();
Native native = obj->toFunction()->native();
if (native == js::array_push) {
for (size_t i = 0; i < callsite->argumentCount; i++) {
@ -1210,7 +1210,7 @@ TypeConstraintCall::newType(JSContext *cx, TypeSet *source, Type type)
return;
}
callee = obj->getFunctionPrivate();
callee = obj->toFunction();
} else if (type.isTypeObject()) {
callee = type.typeObject()->interpretedFunction;
if (!callee)
@ -1220,7 +1220,7 @@ TypeConstraintCall::newType(JSContext *cx, TypeSet *source, Type type)
return;
}
if (!callee->script()->ensureHasTypes(cx, callee))
if (!callee->script()->ensureHasTypes(cx))
return;
unsigned nargs = callee->nargs;
@ -1283,9 +1283,9 @@ TypeConstraintPropagateThis::newType(JSContext *cx, TypeSet *source, Type type)
if (type.isSingleObject()) {
JSObject *object = type.singleObject();
if (!object->isFunction() || !object->getFunctionPrivate()->isInterpreted())
if (!object->isFunction() || !object->toFunction()->isInterpreted())
return;
callee = object->getFunctionPrivate();
callee = object->toFunction();
} else if (type.isTypeObject()) {
TypeObject *object = type.typeObject();
if (!object->interpretedFunction)
@ -1296,7 +1296,7 @@ TypeConstraintPropagateThis::newType(JSContext *cx, TypeSet *source, Type type)
return;
}
if (!callee->script()->ensureHasTypes(cx, callee))
if (!callee->script()->ensureHasTypes(cx))
return;
TypeSet *thisTypes = TypeScript::ThisTypes(callee->script());
@ -1676,7 +1676,7 @@ types::MarkArgumentsCreated(JSContext *cx, JSScript *script)
mjit::ExpandInlineFrames(cx->compartment);
#endif
if (!script->ensureRanAnalysis(cx))
if (!script->ensureRanAnalysis(cx, NULL))
return;
ScriptAnalysis *analysis = script->analysis();
@ -1961,8 +1961,6 @@ TypeSet::needsBarrier(JSContext *cx)
// TypeCompartment
/////////////////////////////////////////////////////////////////////
TypeObject types::emptyTypeObject(NULL, false, true);
void
TypeCompartment::init(JSContext *cx)
{
@ -1976,7 +1974,7 @@ TypeObject *
TypeCompartment::newTypeObject(JSContext *cx, JSScript *script,
JSProtoKey key, JSObject *proto, bool unknown)
{
TypeObject *object = NewGCThing<TypeObject>(cx, gc::FINALIZE_TYPE_OBJECT, sizeof(TypeObject));
TypeObject *object = gc::NewGCThing<TypeObject>(cx, gc::FINALIZE_TYPE_OBJECT, sizeof(TypeObject));
if (!object)
return NULL;
new(object) TypeObject(proto, key == JSProto_Function, unknown);
@ -2270,7 +2268,7 @@ TypeCompartment::monitorBytecode(JSContext *cx, JSScript *script, uint32 offset,
cx->compartment->types.addPendingRecompile(cx, script);
/* Trigger recompilation of any inline callers. */
if (script->hasFunction && !script->function()->hasLazyType())
if (script->function() && !script->function()->hasLazyType())
ObjectStateChange(cx, script->function()->type(), false, true);
}
@ -2365,7 +2363,7 @@ ScriptAnalysis::addTypeBarrier(JSContext *cx, const jsbytecode *pc, TypeSet *tar
cx->compartment->types.addPendingRecompile(cx, script);
/* Trigger recompilation of any inline callers. */
if (script->hasFunction && !script->function()->hasLazyType())
if (script->function() && !script->function()->hasLazyType())
ObjectStateChange(cx, script->function()->type(), false, true);
}
@ -2398,7 +2396,7 @@ ScriptAnalysis::addSingletonTypeBarrier(JSContext *cx, const jsbytecode *pc, Typ
if (!code.typeBarriers) {
/* Trigger recompilation as for normal type barriers. */
cx->compartment->types.addPendingRecompile(cx, script);
if (script->hasFunction && !script->function()->hasLazyType())
if (script->function() && !script->function()->hasLazyType())
ObjectStateChange(cx, script->function()->type(), false, true);
}
@ -2480,7 +2478,6 @@ GetValueTypeForTable(JSContext *cx, const Value &v)
{
Type type = GetValueType(cx, v);
JS_ASSERT(!type.isSingleObject());
JS_ASSERT_IF(type.isTypeObject(), type.typeObject() != &emptyTypeObject);
return type;
}
@ -2524,7 +2521,7 @@ TypeCompartment::fixArrayType(JSContext *cx, JSObject *obj)
* If the array is heterogenous, keep the existing type object, which has
* unknown properties.
*/
JS_ASSERT(obj->isPackedDenseArray());
JS_ASSERT(obj->isDenseArray());
unsigned len = obj->getDenseArrayInitializedLength();
if (len == 0)
@ -2584,7 +2581,7 @@ struct types::ObjectTableKey
typedef JSObject * Lookup;
static inline uint32 hash(JSObject *obj) {
return (uint32) (JSID_BITS(obj->lastProperty()->propid.get()) ^
return (uint32) (JSID_BITS(obj->lastProperty()->propid()) ^
obj->slotSpan() ^ obj->numFixedSlots() ^
((uint32)(size_t)obj->getProto() >> 2));
}
@ -2596,8 +2593,8 @@ struct types::ObjectTableKey
return false;
}
const Shape *shape = obj->lastProperty();
while (!JSID_IS_EMPTY(shape->propid)) {
if (shape->propid != v.ids[shape->slot])
while (!shape->isEmptyShape()) {
if (shape->propid() != v.ids[shape->slot()])
return false;
shape = shape->previous();
}
@ -2649,11 +2646,11 @@ TypeCompartment::fixObjectType(JSContext *cx, JSObject *obj)
if (types[i].isPrimitive(JSVAL_TYPE_INT32)) {
types[i] = Type::DoubleType();
const Shape *shape = baseShape;
while (!JSID_IS_EMPTY(shape->propid)) {
if (shape->slot == i) {
while (!shape->isEmptyShape()) {
if (shape->slot() == i) {
Type type = Type::DoubleType();
if (!p->value.object->unknownProperties()) {
jsid id = MakeTypeId(cx, shape->propid);
jsid id = MakeTypeId(cx, shape->propid());
p->value.object->addPropertyType(cx, id, type);
}
break;
@ -2689,12 +2686,12 @@ TypeCompartment::fixObjectType(JSContext *cx, JSObject *obj)
}
const Shape *shape = baseShape;
while (!JSID_IS_EMPTY(shape->propid)) {
ids[shape->slot] = shape->propid;
types[shape->slot] = GetValueTypeForTable(cx, obj->getSlot(shape->slot));
while (!shape->isEmptyShape()) {
ids[shape->slot()] = shape->propid();
types[shape->slot()] = GetValueTypeForTable(cx, obj->getSlot(shape->slot()));
if (!objType->unknownProperties()) {
jsid id = MakeTypeId(cx, shape->propid);
objType->addPropertyType(cx, id, types[shape->slot]);
jsid id = MakeTypeId(cx, shape->propid());
objType->addPropertyType(cx, id, types[shape->slot()]);
}
shape = shape->previous();
}
@ -2754,8 +2751,8 @@ UpdatePropertyType(JSContext *cx, TypeSet *types, JSObject *obj, const Shape *sh
{
if (shape->hasGetterValue() || shape->hasSetterValue()) {
types->addType(cx, Type::UnknownType());
} else if (shape->hasDefaultGetterOrIsMethod() && shape->slot != SHAPE_INVALID_SLOT) {
const Value &value = obj->nativeGetSlot(shape->slot);
} else if (shape->hasDefaultGetterOrIsMethod() && shape->hasSlot()) {
const Value &value = obj->nativeGetSlot(shape->slot());
/*
* Don't add initial undefined types for singleton properties that are
@ -2791,12 +2788,12 @@ TypeObject::addProperty(JSContext *cx, jsid id, Property **pprop)
if (JSID_IS_VOID(id)) {
/* Go through all shapes on the object to get integer-valued properties. */
const Shape *shape = singleton->lastProperty();
while (!JSID_IS_EMPTY(shape->propid)) {
if (JSID_IS_VOID(MakeTypeId(cx, shape->propid)))
while (!shape->isEmptyShape()) {
if (JSID_IS_VOID(MakeTypeId(cx, shape->propid())))
UpdatePropertyType(cx, &base->types, singleton, shape, true);
shape = shape->previous();
}
} else {
} else if (!JSID_IS_EMPTY(id)) {
const Shape *shape = singleton->nativeLookup(cx, id);
if (shape)
UpdatePropertyType(cx, &base->types, singleton, shape, false);
@ -2830,14 +2827,14 @@ TypeObject::addDefiniteProperties(JSContext *cx, JSObject *obj)
AutoEnterTypeInference enter(cx);
const Shape *shape = obj->lastProperty();
while (!JSID_IS_EMPTY(shape->propid)) {
jsid id = MakeTypeId(cx, shape->propid);
if (!JSID_IS_VOID(id) && obj->isFixedSlot(shape->slot) &&
shape->slot <= (TYPE_FLAG_DEFINITE_MASK >> TYPE_FLAG_DEFINITE_SHIFT)) {
while (!shape->isEmptyShape()) {
jsid id = MakeTypeId(cx, shape->propid());
if (!JSID_IS_VOID(id) && obj->isFixedSlot(shape->slot()) &&
shape->slot() <= (TYPE_FLAG_DEFINITE_MASK >> TYPE_FLAG_DEFINITE_SHIFT)) {
TypeSet *types = getProperty(cx, id, true);
if (!types)
return false;
types->setDefinite(shape->slot);
types->setDefinite(shape->slot());
}
shape = shape->previous();
}
@ -2858,8 +2855,8 @@ TypeObject::matchDefiniteProperties(JSObject *obj)
bool found = false;
const Shape *shape = obj->lastProperty();
while (!JSID_IS_EMPTY(shape->propid)) {
if (shape->slot == slot && shape->propid == prop->id) {
while (!shape->isEmptyShape()) {
if (shape->slot() == slot && shape->propid() == prop->id) {
found = true;
break;
}
@ -2970,7 +2967,7 @@ TypeObject::setFlags(JSContext *cx, TypeObjectFlags flags)
JS_ASSERT_IF(flags & OBJECT_FLAG_REENTRANT_FUNCTION,
interpretedFunction->script()->reentrantOuterFunction);
JS_ASSERT_IF(flags & OBJECT_FLAG_ITERATED,
singleton->flags & JSObject::ITERATED);
singleton->lastProperty()->hasObjectFlag(BaseShape::ITERATED_SINGLETON));
}
this->flags |= flags;
@ -3258,7 +3255,7 @@ ScriptAnalysis::resolveNameAccess(JSContext *cx, jsid id, bool addDependency)
JSAtom *atom = JSID_TO_ATOM(id);
JSScript *script = this->script;
while (script->hasFunction && script->nesting()) {
while (script->function() && script->nesting()) {
if (!script->ensureRanInference(cx))
return access;
@ -3407,7 +3404,6 @@ ScriptAnalysis::analyzeTypesBytecode(JSContext *cx, unsigned offset,
case JSOP_DEFAULT:
case JSOP_DEFAULTX:
case JSOP_POPN:
case JSOP_UNBRANDTHIS:
case JSOP_STARTXML:
case JSOP_STARTXMLEXPR:
case JSOP_DEFXMLNS:
@ -3505,7 +3501,7 @@ ScriptAnalysis::analyzeTypesBytecode(JSContext *cx, unsigned offset,
case JSOP_STOP:
/* If a stop is reachable then the return type may be void. */
if (script->hasFunction)
if (script->function())
TypeScript::ReturnTypes(script)->addType(cx, Type::UndefinedType());
break;
@ -3788,7 +3784,7 @@ ScriptAnalysis::analyzeTypesBytecode(JSContext *cx, unsigned offset,
case JSOP_RETURN:
case JSOP_SETRVAL:
if (script->hasFunction)
if (script->function())
poppedTypes(pc, 0)->addSubset(cx, TypeScript::ReturnTypes(script));
break;
@ -4041,12 +4037,8 @@ ScriptAnalysis::analyzeTypesBytecode(JSContext *cx, unsigned offset,
poppedTypes(pc, 1)->addSubset(cx, &pushed[0]);
break;
case JSOP_UNBRAND:
poppedTypes(pc, 0)->addSubset(cx, &pushed[0]);
break;
case JSOP_GENERATOR:
if (script->hasFunction) {
if (script->function()) {
if (script->hasGlobal()) {
JSObject *proto = script->global()->getOrCreateGeneratorPrototype(cx);
if (!proto)
@ -4163,7 +4155,7 @@ ScriptAnalysis::analyzeTypes(JSContext *cx)
for (unsigned i = 0; i < script->nfixed; i++)
TypeScript::LocalTypes(script, i)->addType(cx, Type::UndefinedType());
TypeScriptNesting *nesting = script->hasFunction ? script->nesting() : NULL;
TypeScriptNesting *nesting = script->function() ? script->nesting() : NULL;
if (nesting && nesting->parent) {
/*
* Check whether NAME accesses can be resolved in parent scopes, and
@ -4658,11 +4650,11 @@ AnalyzeNewScriptProperties(JSContext *cx, TypeObject *type, JSFunction *fun, JSO
JSObject *funcallObj = funcallTypes->getSingleton(cx, false);
JSObject *scriptObj = scriptTypes->getSingleton(cx, false);
if (!funcallObj || !scriptObj || !scriptObj->isFunction() ||
!scriptObj->getFunctionPrivate()->isInterpreted()) {
!scriptObj->toFunction()->isInterpreted()) {
return false;
}
JSFunction *function = scriptObj->getFunctionPrivate();
JSFunction *function = scriptObj->toFunction();
JS_ASSERT(!function->script()->isInnerFunction);
/*
@ -4850,7 +4842,7 @@ ScriptAnalysis::printTypes(JSContext *cx)
#ifdef DEBUG
if (script->hasFunction)
if (script->function())
printf("Function");
else if (script->isCachedEval)
printf("Eval");
@ -4864,7 +4856,7 @@ ScriptAnalysis::printTypes(JSContext *cx)
printf("\n this:");
TypeScript::ThisTypes(script)->print(cx);
for (unsigned i = 0; script->hasFunction && i < script->function()->nargs; i++) {
for (unsigned i = 0; script->function() && i < script->function()->nargs; i++) {
printf("\n arg%u:", i);
TypeScript::ArgTypes(script, i)->print(cx);
}
@ -4989,7 +4981,7 @@ MarkIteratorUnknownSlow(JSContext *cx)
}
/* Trigger recompilation of any inline callers. */
if (script->hasFunction && !script->function()->hasLazyType())
if (script->function() && !script->function()->hasLazyType())
ObjectStateChange(cx, script->function()->type(), false, true);
}
@ -4997,8 +4989,8 @@ void
TypeMonitorCallSlow(JSContext *cx, JSObject *callee,
const CallArgs &args, bool constructing)
{
unsigned nargs = callee->getFunctionPrivate()->nargs;
JSScript *script = callee->getFunctionPrivate()->script();
unsigned nargs = callee->toFunction()->nargs;
JSScript *script = callee->toFunction()->script();
if (!constructing)
TypeScript::SetThis(cx, script, args.thisv());
@ -5034,7 +5026,7 @@ TypeDynamicResult(JSContext *cx, JSScript *script, jsbytecode *pc, Type type)
/* Directly update associated type sets for applicable bytecodes. */
if (js_CodeSpec[*pc].format & JOF_TYPESET) {
if (!script->ensureRanAnalysis(cx)) {
if (!script->ensureRanAnalysis(cx, NULL)) {
cx->compartment->types.setPendingNukeTypes(cx);
return;
}
@ -5127,7 +5119,7 @@ TypeDynamicResult(JSContext *cx, JSScript *script, jsbytecode *pc, Type type)
}
/* Trigger recompilation of any inline callers. */
if (script->hasFunction && !script->function()->hasLazyType())
if (script->function() && !script->function()->hasLazyType())
ObjectStateChange(cx, script->function()->type(), false, true);
}
@ -5142,7 +5134,7 @@ TypeMonitorResult(JSContext *cx, JSScript *script, jsbytecode *pc, const js::Val
AutoEnterTypeInference enter(cx);
if (!script->ensureRanAnalysis(cx)) {
if (!script->ensureRanAnalysis(cx, NULL)) {
cx->compartment->types.setPendingNukeTypes(cx);
return;
}
@ -5162,9 +5154,8 @@ TypeScript::SetScope(JSContext *cx, JSScript *script, JSObject *scope)
{
JS_ASSERT(script->types && !script->types->hasScope());
JSFunction *fun = script->types->function;
JSFunction *fun = script->function();
JS_ASSERT(script->hasFunction == (fun != NULL));
JS_ASSERT_IF(!fun, !script->isOuterFunction && !script->isInnerFunction);
JS_ASSERT_IF(!scope, fun && !script->isInnerFunction);
@ -5183,6 +5174,14 @@ TypeScript::SetScope(JSContext *cx, JSScript *script, JSObject *scope)
JS_ASSERT_IF(fun && scope, fun->getGlobal() == scope->getGlobal());
script->types->global = fun ? fun->getGlobal() : scope->getGlobal();
/*
* Update the parent in the script's bindings. The bindings are created
* with a NULL parent, and fixing the parent now avoids the need to reshape
* every time a call object is created from the bindings.
*/
if (!script->bindings.setParent(cx, script->types->global))
return false;
if (!cx->typeInferenceEnabled())
return true;
@ -5204,7 +5203,7 @@ TypeScript::SetScope(JSContext *cx, JSScript *script, JSObject *scope)
* the script is nested inside.
*/
while (!scope->isCall())
scope = scope->getParent();
scope = scope->internalScopeChain();
CallObject &call = scope->asCall();
@ -5234,10 +5233,10 @@ TypeScript::SetScope(JSContext *cx, JSScript *script, JSObject *scope)
* the parent's call object as the most recent one, so that it is not
* marked as reentrant.
*/
if (!parent->ensureHasTypes(cx, parentFun))
if (!parent->ensureHasTypes(cx))
return false;
if (!parent->types->hasScope()) {
if (!SetScope(cx, parent, scope->getParent()))
if (!SetScope(cx, parent, scope->internalScopeChain()))
return false;
parent->nesting()->activeCall = scope;
parent->nesting()->argArray = Valueify(call.argArray());
@ -5332,7 +5331,7 @@ CheckNestingParent(JSContext *cx, JSObject *scope, JSScript *script)
JS_ASSERT(parent);
while (!scope->isCall() || scope->asCall().getCalleeFunction()->script() != parent)
scope = scope->getParent();
scope = scope->internalScopeChain();
if (scope != parent->nesting()->activeCall) {
parent->reentrantOuterFunction = true;
@ -5347,7 +5346,7 @@ CheckNestingParent(JSContext *cx, JSObject *scope, JSScript *script)
* parent.
*/
if (parent->nesting()->parent) {
scope = scope->getParent();
scope = scope->internalScopeChain();
script = parent;
goto restart;
}
@ -5475,23 +5474,21 @@ IgnorePushed(const jsbytecode *pc, unsigned index)
}
bool
JSScript::makeTypes(JSContext *cx, JSFunction *fun)
JSScript::makeTypes(JSContext *cx)
{
JS_ASSERT(!types);
JS_ASSERT(hasFunction == (fun != NULL));
if (!cx->typeInferenceEnabled()) {
types = (TypeScript *) cx->calloc_(sizeof(TypeScript));
if (!types)
return false;
new(types) TypeScript(fun);
new(types) TypeScript();
return true;
}
AutoEnterTypeInference enter(cx);
/* Open code for NumTypeSets since the types are not filled in yet. */
unsigned count = 2 + (fun ? fun->nargs : 0) + nfixed + nTypeSets;
unsigned count = TypeScript::NumTypeSets(this);
types = (TypeScript *) cx->calloc_(sizeof(TypeScript) + (sizeof(TypeSet) * count));
if (!types) {
@ -5499,7 +5496,7 @@ JSScript::makeTypes(JSContext *cx, JSFunction *fun)
return false;
}
new(types) TypeScript(fun);
new(types) TypeScript();
#ifdef DEBUG
TypeSet *typeArray = types->typeArray();
@ -5515,7 +5512,7 @@ JSScript::makeTypes(JSContext *cx, JSFunction *fun)
InferSpew(ISpewOps, "typeSet: %sT%p%s this #%u",
InferSpewColor(thisTypes), thisTypes, InferSpewColorReset(),
id());
unsigned nargs = hasFunction ? function()->nargs : 0;
unsigned nargs = function() ? function()->nargs : 0;
for (unsigned i = 0; i < nargs; i++) {
TypeSet *types = TypeScript::ArgTypes(this, i);
InferSpew(ISpewOps, "typeSet: %sT%p%s arg%u #%u",
@ -5558,9 +5555,7 @@ JSScript::makeAnalysis(JSContext *cx)
bool
JSScript::typeSetFunction(JSContext *cx, JSFunction *fun, bool singleton)
{
hasFunction = true;
if (fun->isHeavyweight())
isHeavyweightFunction = true;
function_ = fun;
if (!cx->typeInferenceEnabled())
return true;
@ -5661,7 +5656,7 @@ JSObject::splicePrototype(JSContext *cx, JSObject *proto)
}
if (!cx->typeInferenceEnabled()) {
TypeObject *type = proto ? proto->getNewType(cx) : &emptyTypeObject;
TypeObject *type = proto ? proto->getNewType(cx) : cx->compartment->getEmptyType(cx);
if (!type)
return false;
type_ = type;
@ -5707,8 +5702,8 @@ JSObject::makeLazyType(JSContext *cx)
type->singleton = this;
if (isFunction() && getFunctionPrivate() && getFunctionPrivate()->isInterpreted()) {
type->interpretedFunction = getFunctionPrivate();
if (isFunction() && toFunction()->isInterpreted()) {
type->interpretedFunction = toFunction();
JSScript *script = type->interpretedFunction->script();
if (script->createdArgs)
type->flags |= OBJECT_FLAG_CREATED_ARGUMENTS;
@ -5718,7 +5713,7 @@ JSObject::makeLazyType(JSContext *cx)
type->flags |= OBJECT_FLAG_REENTRANT_FUNCTION;
}
if (flags & ITERATED)
if (lastProperty()->hasObjectFlag(BaseShape::ITERATED_SINGLETON))
type->flags |= OBJECT_FLAG_ITERATED;
#if JS_HAS_XML_SUPPORT
@ -5730,12 +5725,11 @@ JSObject::makeLazyType(JSContext *cx)
type->markUnknown(cx);
#endif
if (clasp->ext.equality)
if (getClass()->ext.equality)
type->flags |= OBJECT_FLAG_SPECIAL_EQUALITY;
if (type->unknownProperties()) {
type_ = type;
flags &= ~LAZY_TYPE;
return;
}
@ -5745,24 +5739,101 @@ JSObject::makeLazyType(JSContext *cx)
| OBJECT_FLAG_NON_TYPED_ARRAY;
type_ = type;
flags &= ~LAZY_TYPE;
}
void
JSObject::makeNewType(JSContext *cx, JSFunction *fun, bool unknown)
/* static */ inline HashNumber
TypeObjectEntry::hash(JSObject *proto)
{
JS_ASSERT(!newType);
return PointerHasher<JSObject *, 3>::hash(proto);
}
/* static */ inline bool
TypeObjectEntry::match(TypeObject *key, JSObject *lookup)
{
return key->proto == lookup;
}
#ifdef DEBUG
bool
JSObject::hasNewType(TypeObject *type)
{
TypeObjectSet &table = compartment()->newTypeObjects;
if (!table.initialized())
return false;
TypeObjectSet::Ptr p = table.lookup(this);
return p && *p == type;
}
#endif /* DEBUG */
bool
JSObject::setNewTypeUnknown(JSContext *cx)
{
if (!setFlag(cx, js::BaseShape::NEW_TYPE_UNKNOWN))
return false;
/*
* If the object already has a new type, mark that type as unknown. It will
* not have the SETS_MARKED_UNKNOWN bit set, so may require a type set
* crawl if prototypes of the object change dynamically in the future.
*/
TypeObjectSet &table = cx->compartment->newTypeObjects;
if (table.initialized()) {
if (TypeObjectSet::Ptr p = table.lookup(this))
MarkTypeObjectUnknownProperties(cx, *p);
}
return true;
}
TypeObject *
JSObject::getNewType(JSContext *cx, JSFunction *fun)
{
if (!setDelegate(cx))
return NULL;
TypeObjectSet &table = cx->compartment->newTypeObjects;
if (!table.initialized() && !table.init())
return NULL;
TypeObjectSet::AddPtr p = table.lookupForAdd(this);
if (p) {
TypeObject *type = *p;
/*
* If set, the type's newScript indicates the script used to create
* all objects in existence which have this type. If there are objects
* in existence which are not created by calling 'new' on newScript,
* we must clear the new script information from the type and will not
* be able to assume any definite properties for instances of the type.
* This case is rare, but can happen if, for example, two scripted
* functions have the same value for their 'prototype' property, or if
* Object.create is called with a prototype object that is also the
* 'prototype' property of some scripted function.
*/
if (type->newScript && type->newScript->fun != fun)
type->clearNewScript(cx);
if (cx->compartment->needsBarrier())
TypeObject::readBarrier(type);
return type;
}
bool markUnknown = lastProperty()->hasObjectFlag(BaseShape::NEW_TYPE_UNKNOWN);
TypeObject *type = cx->compartment->types.newTypeObject(cx, NULL,
JSProto_Object, this, unknown);
JSProto_Object, this, markUnknown);
if (!type)
return;
return NULL;
newType.init(type);
setDelegate();
if (!table.relookupOrAdd(p, this, type))
return NULL;
if (!cx->typeInferenceEnabled())
return;
return type;
AutoEnterTypeInference enter(cx);
@ -5783,7 +5854,7 @@ JSObject::makeNewType(JSContext *cx, JSFunction *fun, bool unknown)
type->flags |= OBJECT_FLAG_UNKNOWN_MASK;
#endif
if (clasp->ext.equality)
if (getClass()->ext.equality)
type->flags |= OBJECT_FLAG_SPECIAL_EQUALITY;
/*
@ -5796,6 +5867,40 @@ JSObject::makeNewType(JSContext *cx, JSFunction *fun, bool unknown)
*/
if (type->unknownProperties())
type->flags |= OBJECT_FLAG_SETS_MARKED_UNKNOWN;
return type;
}
TypeObject *
JSCompartment::getLazyType(JSContext *cx, JSObject *proto)
{
TypeObjectSet &table = cx->compartment->lazyTypeObjects;
if (!table.initialized() && !table.init())
return NULL;
TypeObjectSet::AddPtr p = table.lookupForAdd(proto);
if (p) {
TypeObject *type = *p;
JS_ASSERT(type->lazy());
if (cx->compartment->needsBarrier())
TypeObject::readBarrier(type);
return type;
}
TypeObject *type = cx->compartment->types.newTypeObject(cx, NULL,
JSProto_Object, proto, false);
if (!type)
return NULL;
if (!table.relookupOrAdd(p, proto, type))
return NULL;
type->singleton = (JSObject *) TypeObject::LAZY_SINGLETON;
return type;
}
/////////////////////////////////////////////////////////////////////
@ -5847,15 +5952,6 @@ TypeSet::sweep(JSContext *cx, JSCompartment *compartment)
flags &= ~TYPE_FLAG_PROPAGATED_PROPERTY;
}
inline void
JSObject::revertLazyType()
{
JS_ASSERT(hasSingletonType() && !hasLazyType());
JS_ASSERT_IF(type_->proto, type_->proto->newType);
flags |= LAZY_TYPE;
type_ = (type_->proto) ? type_->proto->newType : &emptyTypeObject;
}
inline void
TypeObject::clearProperties()
{
@ -5880,7 +5976,6 @@ TypeObject::sweep(JSContext *cx)
contribution = 0;
if (singleton) {
JS_ASSERT(!emptyShapes);
JS_ASSERT(!newScript);
/*
@ -5889,25 +5984,10 @@ TypeObject::sweep(JSContext *cx)
*/
clearProperties();
if (!isMarked()) {
/*
* Singleton objects do not hold strong references on their types.
* When removing the type, however, we need to fixup the singleton
* so that it has a lazy type again. The generic 'new' type for the
* proto must be live, since the type's prototype and its 'new'
* type are both strong references.
*/
JS_ASSERT_IF(singleton->isMarked() && proto,
proto->isMarked() && proto->newType->isMarked());
singleton->revertLazyType();
}
return;
}
if (!isMarked()) {
if (emptyShapes)
Foreground::free_(emptyShapes);
if (newScript)
Foreground::free_(newScript);
return;
@ -5991,9 +6071,6 @@ struct SweepTypeObjectOp
void
SweepTypeObjects(JSContext *cx, JSCompartment *compartment)
{
JS_ASSERT(!emptyTypeObject.emptyShapes);
JS_ASSERT(!emptyTypeObject.newScript);
SweepTypeObjectOp op(cx);
gc::ForEachArenaAndCell(compartment, gc::FINALIZE_TYPE_OBJECT, gc::EmptyArenaOp, op);
}
@ -6077,6 +6154,18 @@ TypeCompartment::sweep(JSContext *cx)
pendingCapacity = 0;
}
void
JSCompartment::sweepNewTypeObjectTable(JSContext *cx, TypeObjectSet &table)
{
if (table.initialized()) {
for (TypeObjectSet::Enum e(table); !e.empty(); e.popFront()) {
TypeObject *type = e.front();
if (!type->isMarked())
e.removeFront();
}
}
}
TypeCompartment::~TypeCompartment()
{
if (pendingArray)
@ -6275,7 +6364,7 @@ JS_GetTypeInferenceObjectStats(void *object_, TypeInferenceMemoryStats *stats, J
* every GC. The type object is normally destroyed too, but we don't
* charge this to 'temporary' as this is not for GC heap values.
*/
JS_ASSERT(!object->newScript && !object->emptyShapes);
JS_ASSERT(!object->newScript);
return;
}
@ -6290,12 +6379,6 @@ JS_GetTypeInferenceObjectStats(void *object_, TypeInferenceMemoryStats *stats, J
stats->objects += mallocSizeOf(object->newScript, computedSize);
}
if (object->emptyShapes) {
stats->emptyShapes +=
mallocSizeOf(object->emptyShapes,
sizeof(EmptyShape*) * gc::FINALIZE_OBJECT_LIMIT);
}
/*
* This counts memory that is in the temp pool but gets attributed
* elsewhere. See JS_GetTypeInferenceMemoryStats for more details.

Просмотреть файл

@ -731,19 +731,16 @@ struct TypeObject : gc::Cell
*/
HeapPtrObject singleton;
/* Lazily filled array of empty shapes for each size of objects with this type. */
HeapPtr<EmptyShape> *emptyShapes;
/*
* Value held by singleton if this is a standin type for a singleton JS
* object whose type has not been constructed yet.
*/
static const size_t LAZY_SINGLETON = 1;
bool lazy() const { return singleton == (JSObject *) LAZY_SINGLETON; }
/* Flags for this object. */
TypeObjectFlags flags;
/*
* If non-NULL, objects of this type have always been constructed using
* 'new' on the specified script, which adds some number of properties to
* the object in a definite order before the object escapes.
*/
HeapPtr<TypeNewScript> newScript;
/*
* Estimate of the contribution of this object to the type sets it appears in.
* This is the sum of the sizes of those sets at the point when the object
@ -758,6 +755,13 @@ struct TypeObject : gc::Cell
uint32 contribution;
static const uint32 CONTRIBUTION_LIMIT = 2000;
/*
* If non-NULL, objects of this type have always been constructed using
* 'new' on the specified script, which adds some number of properties to
* the object in a definite order before the object escapes.
*/
HeapPtr<TypeNewScript> newScript;
/*
* Properties of this object. This may contain JSID_VOID, representing the
* types of all integer indexes of the object, and/or JSID_EMPTY, holding
@ -793,6 +797,10 @@ struct TypeObject : gc::Cell
/* If this is an interpreted function, the function object. */
HeapPtrFunction interpretedFunction;
#if JS_BITS_PER_WORD == 32
void *padding;
#endif
inline TypeObject(JSObject *proto, bool isFunction, bool unknown);
bool isFunction() { return !!(flags & OBJECT_FLAG_FUNCTION); }
@ -812,16 +820,6 @@ struct TypeObject : gc::Cell
return !!(flags & OBJECT_FLAG_UNKNOWN_PROPERTIES);
}
/*
* Return an immutable, shareable, empty shape with the same clasp as this
* and the same slotSpan as this had when empty.
*
* If |this| is the scope of an object |proto|, the resulting scope can be
* used as the scope of a new object whose prototype is |proto|.
*/
inline bool canProvideEmptyShape(js::Class *clasp);
inline js::EmptyShape *getEmptyShape(JSContext *cx, js::Class *aclasp, gc::AllocKind kind);
/*
* Get or create a property of this object. Only call this for properties which
* a script accesses explicitly. 'assign' indicates whether this is for an
@ -874,10 +872,11 @@ struct TypeObject : gc::Cell
* object pending deletion is released when weak references are sweeped
* from all the compartment's type objects.
*/
void finalize(JSContext *cx) {}
void finalize(JSContext *cx, bool background) {}
static inline void writeBarrierPre(TypeObject *type);
static inline void writeBarrierPost(TypeObject *type, void *addr);
static inline void readBarrier(TypeObject *type);
private:
inline uint32 basePropertyCount() const;
@ -888,8 +887,18 @@ struct TypeObject : gc::Cell
}
};
/* Global singleton for the generic type of objects with no prototype. */
extern TypeObject emptyTypeObject;
/*
* Entries for the per-compartment set of type objects which are the default
* 'new' or the lazy types of some prototype.
*/
struct TypeObjectEntry
{
typedef JSObject *Lookup;
static inline HashNumber hash(JSObject *base);
static inline bool match(TypeObject *key, JSObject *lookup);
};
typedef HashSet<TypeObject *, TypeObjectEntry, SystemAllocPolicy> TypeObjectSet;
/*
* Call to mark a script's arguments as having been created, recompile any
@ -1020,9 +1029,6 @@ class TypeScript
/* Analysis information for the script, cleared on each GC. */
analyze::ScriptAnalysis *analysis;
/* Function for the script, if it has one. */
HeapPtrFunction function;
/*
* Information about the scope in which a script executes. This information
* is not set until the script has executed at least once and SetScope
@ -1041,8 +1047,7 @@ class TypeScript
/* Dynamic types generated at points within this script. */
TypeResult *dynamicList;
inline TypeScript(JSFunction *fun);
inline ~TypeScript();
inline TypeScript();
bool hasScope() { return size_t(global.get()) != GLOBAL_MISSING_SCOPE; }
@ -1270,4 +1275,8 @@ void TypeFailure(JSContext *cx, const char *fmt, ...);
} /* namespace types */
} /* namespace js */
namespace JS {
template<> class AnchorPermitted<js::types::TypeObject *> { };
}
#endif // jsinfer_h___

Просмотреть файл

@ -321,10 +321,10 @@ TypeMonitorCall(JSContext *cx, const js::CallArgs &args, bool constructing)
JSObject *callee = &args.callee();
if (callee->isFunction()) {
JSFunction *fun = callee->getFunctionPrivate();
JSFunction *fun = callee->toFunction();
if (fun->isInterpreted()) {
JSScript *script = fun->script();
if (!script->ensureRanAnalysis(cx, fun, callee->getParent()))
if (!script->ensureRanAnalysis(cx, fun->environment()))
return;
if (cx->typeInferenceEnabled())
TypeMonitorCallSlow(cx, callee, args, constructing);
@ -460,15 +460,9 @@ UseNewTypeAtEntry(JSContext *cx, StackFrame *fp)
/////////////////////////////////////////////////////////////////////
inline
TypeScript::TypeScript(JSFunction *fun)
: function(fun),
global((js::GlobalObject *) GLOBAL_MISSING_SCOPE)
{
}
inline
TypeScript::~TypeScript()
TypeScript::TypeScript()
{
this->global = (js::GlobalObject *) GLOBAL_MISSING_SCOPE;
}
/* static */ inline unsigned
@ -700,8 +694,6 @@ TypeScript::SetArgument(JSContext *cx, JSScript *script, unsigned arg, const js:
void
TypeScript::trace(JSTracer *trc)
{
if (function)
gc::MarkObject(trc, function, "script_fun");
if (hasScope() && global)
gc::MarkObject(trc, global, "script_global");
@ -1268,7 +1260,7 @@ inline void
TypeObject::writeBarrierPre(TypeObject *type)
{
#ifdef JSGC_INCREMENTAL
if (!type || type == &js::types::emptyTypeObject)
if (!type)
return;
JSCompartment *comp = type->compartment();
@ -1282,6 +1274,17 @@ TypeObject::writeBarrierPost(TypeObject *type, void *addr)
{
}
inline void
TypeObject::readBarrier(TypeObject *type)
{
#ifdef JSGC_INCREMENTAL
JSCompartment *comp = type->compartment();
JS_ASSERT(comp->needsBarrier());
MarkTypeObjectUnbarriered(comp->barrierTracer(), type, "read barrier");
#endif
}
inline void
TypeNewScript::writeBarrierPre(TypeNewScript *newScript)
{
@ -1317,15 +1320,15 @@ Property::Property(const Property &o)
} } /* namespace js::types */
inline bool
JSScript::ensureHasTypes(JSContext *cx, JSFunction *fun)
JSScript::ensureHasTypes(JSContext *cx)
{
return types || makeTypes(cx, fun);
return types || makeTypes(cx);
}
inline bool
JSScript::ensureRanAnalysis(JSContext *cx, JSFunction *fun, JSObject *scope)
JSScript::ensureRanAnalysis(JSContext *cx, JSObject *scope)
{
if (!ensureHasTypes(cx, fun))
if (!ensureHasTypes(cx))
return false;
if (!types->hasScope() && !js::types::TypeScript::SetScope(cx, this, scope))
return false;
@ -1338,7 +1341,7 @@ JSScript::ensureRanAnalysis(JSContext *cx, JSFunction *fun, JSObject *scope)
inline bool
JSScript::ensureRanInference(JSContext *cx)
{
if (!ensureRanAnalysis(cx))
if (!ensureRanAnalysis(cx, NULL))
return false;
if (!analysis()->ranInference()) {
js::types::AutoEnterTypeInference enter(cx);
@ -1376,4 +1379,12 @@ js::analyze::ScriptAnalysis::addPushedType(JSContext *cx, uint32 offset, uint32
pushed->addType(cx, type);
}
inline js::types::TypeObject *
JSCompartment::getEmptyType(JSContext *cx)
{
if (!emptyTypeObject)
emptyTypeObject = types.newTypeObject(cx, NULL, JSProto_Object, NULL, true);
return emptyTypeObject;
}
#endif // jsinferinlines_h___

Просмотреть файл

@ -190,7 +190,7 @@ js::GetBlockChain(JSContext *cx, StackFrame *fp)
else if (op == JSOP_ENTERBLOCK)
blockChain = script->getObject(indexBase + GET_INDEX(pc));
else if (op == JSOP_LEAVEBLOCK || op == JSOP_LEAVEBLOCKEXPR)
blockChain = blockChain->getParent();
blockChain = blockChain->getStaticBlockScopeChain();
else if (op == JSOP_BLOCKCHAIN)
blockChain = script->getObject(indexBase + GET_INDEX(pc));
else if (op == JSOP_NULLBLOCKCHAIN)
@ -295,7 +295,7 @@ GetScopeChainFull(JSContext *cx, StackFrame *fp, JSObject *blockChain)
*/
limitClone = &fp->scopeChain();
while (limitClone->isWith())
limitClone = limitClone->getParent();
limitClone = limitClone->internalScopeChain();
JS_ASSERT(limitClone);
/*
@ -342,7 +342,7 @@ GetScopeChainFull(JSContext *cx, StackFrame *fp, JSObject *blockChain)
JSObject *newChild = innermostNewChild;
for (;;) {
JS_ASSERT(newChild->getProto() == sharedBlock);
sharedBlock = sharedBlock->getParent();
sharedBlock = sharedBlock->getStaticBlockScopeChain();
/* Sometimes limitBlock will be NULL, so check that first. */
if (sharedBlock == limitBlock || !sharedBlock)
@ -353,10 +353,12 @@ GetScopeChainFull(JSContext *cx, StackFrame *fp, JSObject *blockChain)
if (!clone)
return NULL;
newChild->setParent(clone);
if (!newChild->setInternalScopeChain(cx, clone))
return NULL;
newChild = clone;
}
newChild->setParent(&fp->scopeChain());
if (!newChild->setInternalScopeChain(cx, &fp->scopeChain()))
return NULL;
/*
@ -422,7 +424,7 @@ js::BoxNonStrictThis(JSContext *cx, const CallReceiver &call)
JS_ASSERT(!thisv.isMagic());
#ifdef DEBUG
JSFunction *fun = call.callee().isFunction() ? call.callee().getFunctionPrivate() : NULL;
JSFunction *fun = call.callee().isFunction() ? call.callee().toFunction() : NULL;
JS_ASSERT_IF(fun && fun->isInterpreted(), !fun->inStrictMode());
#endif
@ -509,12 +511,11 @@ js::OnUnknownMethod(JSContext *cx, Value *vp)
vp[0] = IdToValue(id);
}
#endif
obj = js_NewGCObject(cx, FINALIZE_OBJECT2);
obj = NewObjectWithClassProto(cx, &js_NoSuchMethodClass, NULL, NULL);
if (!obj)
return false;
obj->init(cx, &js_NoSuchMethodClass, &emptyTypeObject, NULL, NULL, false);
obj->setSharedNonNativeMap();
obj->setSlot(JSSLOT_FOUND_FUNCTION, tvr.value());
obj->setSlot(JSSLOT_SAVED_ID, vp[0]);
vp[0].setObject(*obj);
@ -619,7 +620,7 @@ js::InvokeKernel(JSContext *cx, CallArgs args, MaybeConstruct construct)
}
/* Invoke native functions. */
JSFunction *fun = callee.getFunctionPrivate();
JSFunction *fun = callee.toFunction();
JS_ASSERT_IF(construct, !fun->isConstructor());
if (fun->isNative())
return CallJSNative(cx, fun->u.n.native, args);
@ -766,7 +767,7 @@ js::ExecuteKernel(JSContext *cx, JSScript *script, JSObject &scopeChain, const V
Probes::startExecution(cx, script);
if (!script->ensureRanAnalysis(cx, NULL, &scopeChain))
if (!script->ensureRanAnalysis(cx, &scopeChain))
return false;
TypeScript::SetThis(cx, script, fp->thisValue());
@ -801,8 +802,10 @@ js::Execute(JSContext *cx, JSScript *script, JSObject &scopeChainArg, Value *rva
JS_ASSERT(!scopeChain->getOps()->defineProperty);
/* The VAROBJFIX option makes varObj == globalObj in global code. */
if (!cx->hasRunOption(JSOPTION_VAROBJFIX))
scopeChain->makeVarObj();
if (!cx->hasRunOption(JSOPTION_VAROBJFIX)) {
if (!scopeChain->setVarObj(cx))
return false;
}
/* Use the scope chain as 'this', modulo outerization. */
JSObject *thisObj = scopeChain->thisObject(cx);
@ -922,7 +925,6 @@ js::LooselyEqual(JSContext *cx, const Value &lval, const Value &rval, JSBool *re
if (lval.isObject()) {
JSObject *l = &lval.toObject();
JSObject *r = &rval.toObject();
l->assertSpecialEqualitySynced();
if (JSEqualityOp eq = l->getClass()->ext.equality) {
return eq(cx, l, &rval, result);
@ -1073,7 +1075,7 @@ js::InvokeConstructorKernel(JSContext *cx, const CallArgs &argsRef)
JSObject *callee = &args.callee();
Class *clasp = callee->getClass();
if (clasp == &FunctionClass) {
JSFunction *fun = callee->getFunctionPrivate();
JSFunction *fun = callee->toFunction();
if (fun->isConstructor()) {
Probes::calloutBegin(cx, fun);
@ -1117,7 +1119,7 @@ js::InvokeConstructorWithGivenThis(JSContext *cx, JSObject *thisobj, const Value
Class *clasp = callee.getClass();
JSFunction *fun;
bool ok;
if (clasp == &FunctionClass && (fun = callee.getFunctionPrivate())->isConstructor()) {
if (clasp == &FunctionClass && (fun = callee.toFunction())->isConstructor()) {
args.thisv().setMagicWithObjectOrNullPayload(thisobj);
Probes::calloutBegin(cx, fun);
ok = CallJSNativeConstructor(cx, fun->u.n.native, args);
@ -1205,7 +1207,7 @@ LeaveWith(JSContext *cx)
JS_ASSERT(withobj->getPrivate() == js_FloatingFrameIfGenerator(cx, cx->fp()));
JS_ASSERT(OBJ_BLOCK_DEPTH(cx, withobj) >= 0);
withobj->setPrivate(NULL);
cx->fp()->setScopeChainNoCallObj(*withobj->getParent());
cx->fp()->setScopeChainNoCallObj(*withobj->internalScopeChain());
}
bool
@ -1411,9 +1413,9 @@ inline InterpreterFrames::~InterpreterFrames()
*/
#if defined DEBUG && !defined JS_THREADSAFE
# define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) \
# define ASSERT_VALID_PROPERTY_CACHE_HIT(obj,pobj,entry) \
JS_BEGIN_MACRO \
if (!AssertValidPropertyCacheHit(cx, script, regs, pcoff, obj, pobj, \
if (!AssertValidPropertyCacheHit(cx, script, regs, obj, pobj, \
entry)) { \
goto error; \
} \
@ -1421,17 +1423,14 @@ inline InterpreterFrames::~InterpreterFrames()
static bool
AssertValidPropertyCacheHit(JSContext *cx, JSScript *script, FrameRegs& regs,
ptrdiff_t pcoff, JSObject *start, JSObject *found,
JSObject *start, JSObject *found,
PropertyCacheEntry *entry)
{
uint32 sample = cx->runtime->gcNumber;
PropertyCacheEntry savedEntry = *entry;
JSAtom *atom;
if (pcoff >= 0)
GET_ATOM_FROM_BYTECODE(script, regs.pc, pcoff, atom);
else
atom = cx->runtime->atomState.lengthAtom;
GET_ATOM_FROM_BYTECODE(script, regs.pc, 0, atom);
JSObject *obj, *pobj;
JSProperty *prop;
@ -1452,34 +1451,13 @@ AssertValidPropertyCacheHit(JSContext *cx, JSScript *script, FrameRegs& regs,
JS_ASSERT(pobj == found);
const Shape *shape = (Shape *) prop;
if (entry->vword.isSlot()) {
JS_ASSERT(entry->vword.toSlot() == shape->slot);
JS_ASSERT(!shape->isMethod());
} else if (entry->vword.isShape()) {
JS_ASSERT(entry->vword.toShape() == shape);
JS_ASSERT_IF(shape->isMethod(),
shape->methodObject() == pobj->nativeGetSlot(shape->slot).toObject());
} else {
Value v;
JS_ASSERT(entry->vword.isFunObj());
JS_ASSERT(!entry->vword.isNull());
JS_ASSERT(pobj->brandedOrHasMethodBarrier());
JS_ASSERT(shape->hasDefaultGetterOrIsMethod());
JS_ASSERT(pobj->containsSlot(shape->slot));
v = pobj->nativeGetSlot(shape->slot);
JS_ASSERT(entry->vword.toFunObj() == v.toObject());
if (shape->isMethod()) {
JS_ASSERT(js_CodeSpec[*regs.pc].format & JOF_CALLOP);
JS_ASSERT(shape->methodObject() == v.toObject());
}
}
JS_ASSERT(entry->prop == shape);
return true;
}
#else
# define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) ((void) 0)
# define ASSERT_VALID_PROPERTY_CACHE_HIT(obj,pobj,entry) ((void) 0)
#endif
/*
@ -1995,7 +1973,7 @@ BEGIN_CASE(JSOP_POPN)
JS_ASSERT_IF(obj,
OBJ_BLOCK_DEPTH(cx, obj) + OBJ_BLOCK_COUNT(cx, obj)
<= (size_t) (regs.sp - regs.fp()->base()));
for (obj = &regs.fp()->scopeChain(); obj; obj = obj->getParent()) {
for (obj = &regs.fp()->scopeChain(); obj; obj = obj->scopeChain()) {
if (!obj->isBlock() || !obj->isWith())
continue;
if (obj->getPrivate() != js_FloatingFrameIfGenerator(cx, regs.fp()))
@ -2353,10 +2331,10 @@ END_CASE(JSOP_PICK)
JS_BEGIN_MACRO \
if (shape->isDataDescriptor() && shape->hasDefaultGetter()) { \
/* Fast path for Object instance properties. */ \
JS_ASSERT((shape)->slot != SHAPE_INVALID_SLOT || \
JS_ASSERT((shape)->slot() != SHAPE_INVALID_SLOT || \
!shape->hasDefaultSetter()); \
if (((shape)->slot != SHAPE_INVALID_SLOT)) \
*(vp) = (pobj)->nativeGetSlot((shape)->slot); \
if (((shape)->slot() != SHAPE_INVALID_SLOT)) \
*(vp) = (pobj)->nativeGetSlot((shape)->slot()); \
else \
(vp)->setUndefined(); \
} else { \
@ -2368,8 +2346,8 @@ END_CASE(JSOP_PICK)
#define NATIVE_SET(cx,obj,shape,entry,strict,vp) \
JS_BEGIN_MACRO \
if (shape->hasDefaultSetter() && \
(shape)->slot != SHAPE_INVALID_SLOT && \
!(obj)->brandedOrHasMethodBarrier()) { \
(shape)->hasSlot() && \
!(shape)->isMethod()) { \
/* Fast path for, e.g., plain Object instance properties. */ \
(obj)->nativeSetSlotWithType(cx, shape, *vp); \
} else { \
@ -2435,7 +2413,7 @@ BEGIN_CASE(JSOP_BINDNAME)
* forms.
*/
obj = &regs.fp()->scopeChain();
if (!obj->getParent())
if (obj->isGlobal())
break;
PropertyCacheEntry *entry;
@ -2443,7 +2421,7 @@ BEGIN_CASE(JSOP_BINDNAME)
JSAtom *atom;
JS_PROPERTY_CACHE(cx).test(cx, regs.pc, obj, obj2, entry, atom);
if (!atom) {
ASSERT_VALID_PROPERTY_CACHE_HIT(0, obj, obj2, entry);
ASSERT_VALID_PROPERTY_CACHE_HIT(obj, obj2, entry);
break;
}
@ -2933,57 +2911,17 @@ BEGIN_CASE(JSOP_VOID)
regs.sp[-1].setUndefined();
END_CASE(JSOP_VOID)
{
/*
* Property incops are followed by an equivalent decomposed version,
* and we have the option of running either. If type inference is enabled
* we run the decomposed version to accumulate observed types and
* overflows which inference can process, otherwise we run the fat opcode
* as doing so is faster and is what the tracer needs while recording.
*/
JSObject *obj;
JSAtom *atom;
jsid id;
jsint i;
BEGIN_CASE(JSOP_INCELEM)
BEGIN_CASE(JSOP_DECELEM)
BEGIN_CASE(JSOP_ELEMINC)
BEGIN_CASE(JSOP_ELEMDEC)
if (cx->typeInferenceEnabled()) {
len = JSOP_INCELEM_LENGTH;
DO_NEXT_OP(len);
}
/*
* Delay fetching of id until we have the object to ensure the proper
* evaluation order. See bug 372331.
*/
id = JSID_VOID;
i = -2;
goto fetch_incop_obj;
/* No-op */
END_CASE(JSOP_INCELEM)
BEGIN_CASE(JSOP_INCPROP)
BEGIN_CASE(JSOP_DECPROP)
BEGIN_CASE(JSOP_PROPINC)
BEGIN_CASE(JSOP_PROPDEC)
if (cx->typeInferenceEnabled()) {
len = JSOP_INCPROP_LENGTH;
DO_NEXT_OP(len);
}
LOAD_ATOM(0, atom);
id = ATOM_TO_JSID(atom);
i = -1;
fetch_incop_obj:
FETCH_OBJECT(cx, i, obj);
if (JSID_IS_VOID(id))
FETCH_ELEMENT_ID(obj, -1, id);
goto do_incop;
BEGIN_CASE(JSOP_INCNAME)
BEGIN_CASE(JSOP_DECNAME)
BEGIN_CASE(JSOP_NAMEINC)
@ -2992,114 +2930,8 @@ BEGIN_CASE(JSOP_INCGNAME)
BEGIN_CASE(JSOP_DECGNAME)
BEGIN_CASE(JSOP_GNAMEINC)
BEGIN_CASE(JSOP_GNAMEDEC)
{
if (cx->typeInferenceEnabled()) {
len = JSOP_INCNAME_LENGTH;
DO_NEXT_OP(len);
}
obj = &regs.fp()->scopeChain();
bool global = (js_CodeSpec[op].format & JOF_GNAME);
if (global)
obj = obj->getGlobal();
JSObject *obj2;
PropertyCacheEntry *entry;
JS_PROPERTY_CACHE(cx).test(cx, regs.pc, obj, obj2, entry, atom);
if (!atom) {
ASSERT_VALID_PROPERTY_CACHE_HIT(0, obj, obj2, entry);
if (obj == obj2 && entry->vword.isSlot()) {
uint32 slot = entry->vword.toSlot();
const Value &rref = obj->nativeGetSlot(slot);
int32_t tmp;
if (JS_LIKELY(rref.isInt32() && CanIncDecWithoutOverflow(tmp = rref.toInt32()))) {
int32_t inc = tmp + ((js_CodeSpec[op].format & JOF_INC) ? 1 : -1);
if (!(js_CodeSpec[op].format & JOF_POST))
tmp = inc;
obj->nativeSetSlot(slot, Int32Value(inc));
PUSH_INT32(tmp);
len = JSOP_INCNAME_LENGTH + GetDecomposeLength(regs.pc, JSOP_INCNAME_LENGTH);
DO_NEXT_OP(len);
}
}
LOAD_ATOM(0, atom);
}
id = ATOM_TO_JSID(atom);
JSProperty *prop;
if (!js_FindPropertyHelper(cx, id, true, global, &obj, &obj2, &prop))
goto error;
if (!prop) {
atomNotDefined = atom;
goto atom_not_defined;
}
}
do_incop:
{
/*
* We need a root to store the value to leave on the stack until
* we have done with obj->setProperty.
*/
PUSH_NULL();
if (!obj->getGeneric(cx, id, &regs.sp[-1]))
goto error;
const JSCodeSpec *cs = &js_CodeSpec[op];
JS_ASSERT(cs->ndefs == 1);
JS_ASSERT((cs->format & JOF_TMPSLOT_MASK) >= JOF_TMPSLOT2);
uint32 format = cs->format;
uint32 setPropFlags = (JOF_MODE(format) == JOF_NAME)
? JSRESOLVE_ASSIGNING
: JSRESOLVE_ASSIGNING | JSRESOLVE_QUALIFIED;
Value &ref = regs.sp[-1];
int32_t tmp;
if (JS_LIKELY(ref.isInt32() && CanIncDecWithoutOverflow(tmp = ref.toInt32()))) {
int incr = (format & JOF_INC) ? 1 : -1;
if (format & JOF_POST)
ref.getInt32Ref() = tmp + incr;
else
ref.getInt32Ref() = tmp += incr;
{
JSAutoResolveFlags rf(cx, setPropFlags);
if (!obj->setGeneric(cx, id, &ref, script->strictModeCode))
goto error;
}
/*
* We must set regs.sp[-1] to tmp for both post and pre increments
* as the setter overwrites regs.sp[-1].
*/
ref.setInt32(tmp);
} else {
/* We need an extra root for the result. */
PUSH_NULL();
if (!DoIncDec(cx, cs, &regs.sp[-2], &regs.sp[-1]))
goto error;
{
JSAutoResolveFlags rf(cx, setPropFlags);
if (!obj->setGeneric(cx, id, &regs.sp[-1], script->strictModeCode))
goto error;
}
regs.sp--;
}
if (cs->nuses == 0) {
/* regs.sp[-1] already contains the result of name increment. */
} else {
regs.sp[-1 - cs->nuses] = regs.sp[-1];
regs.sp -= cs->nuses;
}
len = cs->length + GetDecomposeLength(regs.pc, cs->length);
DO_NEXT_OP(len);
}
}
/* No-op */
END_CASE(JSOP_INCPROP)
{
int incr, incr2;
@ -3164,19 +2996,6 @@ BEGIN_CASE(JSOP_THIS)
PUSH_COPY(regs.fp()->thisValue());
END_CASE(JSOP_THIS)
BEGIN_CASE(JSOP_UNBRANDTHIS)
{
if (!ComputeThis(cx, regs.fp()))
goto error;
Value &thisv = regs.fp()->thisValue();
if (thisv.isObject()) {
JSObject *obj = &thisv.toObject();
if (obj->isNative())
obj->unbrand(cx);
}
}
END_CASE(JSOP_UNBRANDTHIS)
BEGIN_CASE(JSOP_GETPROP)
BEGIN_CASE(JSOP_GETXPROP)
BEGIN_CASE(JSOP_LENGTH)
@ -3235,17 +3054,8 @@ BEGIN_CASE(JSOP_LENGTH)
JSAtom *atom;
JS_PROPERTY_CACHE(cx).test(cx, regs.pc, aobj, obj2, entry, atom);
if (!atom) {
ASSERT_VALID_PROPERTY_CACHE_HIT(0, aobj, obj2, entry);
if (entry->vword.isFunObj()) {
rval.setObject(entry->vword.toFunObj());
} else if (entry->vword.isSlot()) {
uint32 slot = entry->vword.toSlot();
rval = obj2->nativeGetSlot(slot);
} else {
JS_ASSERT(entry->vword.isShape());
const Shape *shape = entry->vword.toShape();
NATIVE_GET(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, &rval);
}
ASSERT_VALID_PROPERTY_CACHE_HIT(aobj, obj2, entry);
NATIVE_GET(cx, obj, obj2, entry->prop, JSGET_METHOD_BARRIER, &rval);
break;
}
@ -3303,17 +3113,8 @@ BEGIN_CASE(JSOP_CALLPROP)
JSAtom *atom;
JS_PROPERTY_CACHE(cx).test(cx, regs.pc, aobj, obj2, entry, atom);
if (!atom) {
ASSERT_VALID_PROPERTY_CACHE_HIT(0, aobj, obj2, entry);
if (entry->vword.isFunObj()) {
rval.setObject(entry->vword.toFunObj());
} else if (entry->vword.isSlot()) {
uint32 slot = entry->vword.toSlot();
rval = obj2->nativeGetSlot(slot);
} else {
JS_ASSERT(entry->vword.isShape());
const Shape *shape = entry->vword.toShape();
NATIVE_GET(cx, &objv.toObject(), obj2, shape, JSGET_NO_METHOD_BARRIER, &rval);
}
ASSERT_VALID_PROPERTY_CACHE_HIT(aobj, obj2, entry);
NATIVE_GET(cx, &objv.toObject(), obj2, entry->prop, JSGET_NO_METHOD_BARRIER, &rval);
regs.sp[-1] = rval;
assertSameCompartment(cx, regs.sp[-1]);
PUSH_COPY(lval);
@ -3361,11 +3162,6 @@ BEGIN_CASE(JSOP_CALLPROP)
}
END_CASE(JSOP_CALLPROP)
BEGIN_CASE(JSOP_UNBRAND)
JS_ASSERT(regs.sp - regs.fp()->slots() >= 1);
regs.sp[-1].toObject().unbrand(cx);
END_CASE(JSOP_UNBRAND)
BEGIN_CASE(JSOP_SETGNAME)
BEGIN_CASE(JSOP_SETNAME)
BEGIN_CASE(JSOP_SETPROP)
@ -3411,89 +3207,30 @@ BEGIN_CASE(JSOP_SETMETHOD)
* know that the entry applies to regs.pc and that obj's shape
* matches.
*
* The entry predicts either a new property to be added directly to
* obj by this set, or on an existing "own" property, or on a
* prototype property that has a setter.
* The entry predicts a set either an existing "own" property, or
* on a prototype property that has a setter.
*/
const Shape *shape = entry->vword.toShape();
const Shape *shape = entry->prop;
JS_ASSERT_IF(shape->isDataDescriptor(), shape->writable());
JS_ASSERT_IF(shape->hasSlot(), entry->vcapTag() == 0);
JS_ASSERT_IF(shape->hasSlot(), !entry->vindex);
/*
* Fastest path: check whether obj already has the cached shape and
* call NATIVE_SET and break to get out of the do-while(0). But we
* can call NATIVE_SET only for a direct or proto-setter hit.
*/
if (!entry->adding()) {
if (entry->vcapTag() == 0 ||
((obj2 = obj->getProto()) && obj2->shape() == entry->vshape()))
{
if (entry->vindex == 0 ||
((obj2 = obj->getProto()) && obj2->lastProperty() == entry->pshape)) {
#ifdef DEBUG
if (entry->directHit()) {
JS_ASSERT(obj->nativeContains(cx, *shape));
} else {
JS_ASSERT(obj2->nativeContains(cx, *shape));
JS_ASSERT(entry->vcapTag() == 1);
JS_ASSERT(entry->kshape != entry->vshape());
JS_ASSERT(!shape->hasSlot());
}
if (entry->directHit()) {
JS_ASSERT(obj->nativeContains(cx, *shape));
} else {
JS_ASSERT(obj2->nativeContains(cx, *shape));
JS_ASSERT(entry->vindex == 1);
JS_ASSERT(entry->kshape != entry->pshape);
JS_ASSERT(!shape->hasSlot());
}
#endif
PCMETER(cache->pchits++);
PCMETER(cache->setpchits++);
NATIVE_SET(cx, obj, shape, entry, script->strictModeCode, &rval);
break;
}
} else {
JS_ASSERT(obj->isExtensible());
if (obj->nativeEmpty()) {
if (!obj->ensureClassReservedSlotsForEmptyObject(cx))
goto error;
}
uint32 slot;
if (shape->previous() == obj->lastProperty() &&
entry->vshape() == rt->protoHazardShape &&
shape->hasDefaultSetter() &&
obj->getClass()->addProperty == JS_PropertyStub) {
slot = shape->slot;
JS_ASSERT(slot == obj->slotSpan());
/*
* Fast path: adding a plain old property that was once at
* the frontier of the property tree, whose slot is next to
* claim among the already-allocated slots in obj, where
* shape->table has not been created yet.
*/
PCMETER(cache->pchits++);
PCMETER(cache->addpchits++);
if (slot < obj->numSlots()) {
JS_ASSERT(obj->getSlot(slot).isUndefined());
} else {
if (!obj->allocSlot(cx, &slot))
goto error;
JS_ASSERT(slot == shape->slot);
}
/* Simply extend obj's property tree path with shape! */
obj->extend(cx, shape);
/*
* No method change check here because here we are adding a
* new property, not updating an existing slot's value that
* might contain a method of a branded shape.
*/
obj->nativeSetSlotWithType(cx, shape, rval);
/*
* Purge the property cache of the id we may have just
* shadowed in obj's scope and proto chains.
*/
js_PurgeScopeChain(cx, obj, shape->propid);
break;
}
PCMETER(cache->pchits++);
PCMETER(cache->setpchits++);
NATIVE_SET(cx, obj, shape, entry, script->strictModeCode, &rval);
break;
}
PCMETER(cache->setpcmisses++);
@ -3713,11 +3450,10 @@ BEGIN_CASE(JSOP_FUNAPPLY)
bool construct = (*regs.pc == JSOP_NEW);
JSObject *callee;
JSFunction *fun;
/* Don't bother trying to fast-path calls to scripted non-constructors. */
if (!IsFunctionObject(args.calleev(), &callee, &fun) || !fun->isInterpretedConstructor()) {
if (!IsFunctionObject(args.calleev(), &fun) || !fun->isInterpretedConstructor()) {
if (construct) {
if (!InvokeConstructorKernel(cx, args))
goto error;
@ -3737,7 +3473,7 @@ BEGIN_CASE(JSOP_FUNAPPLY)
InitialFrameFlags initial = construct ? INITIAL_CONSTRUCT : INITIAL_NONE;
JSScript *newScript = fun->script();
if (!cx->stack.pushInlineFrame(cx, regs, args, *callee, fun, newScript, initial))
if (!cx->stack.pushInlineFrame(cx, regs, args, *fun, newScript, initial))
goto error;
RESTORE_INTERP_VARS();
@ -3804,7 +3540,6 @@ BEGIN_CASE(JSOP_CALLNAME)
if (global)
obj = obj->getGlobal();
const Shape *shape;
Value rval;
PropertyCacheEntry *entry;
@ -3812,18 +3547,9 @@ BEGIN_CASE(JSOP_CALLNAME)
JSAtom *atom;
JS_PROPERTY_CACHE(cx).test(cx, regs.pc, obj, obj2, entry, atom);
if (!atom) {
ASSERT_VALID_PROPERTY_CACHE_HIT(0, obj, obj2, entry);
if (entry->vword.isFunObj()) {
PUSH_OBJECT(entry->vword.toFunObj());
} else if (entry->vword.isSlot()) {
uintN slot = entry->vword.toSlot();
PUSH_COPY(obj2->nativeGetSlot(slot));
} else {
JS_ASSERT(entry->vword.isShape());
shape = entry->vword.toShape();
NATIVE_GET(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, &rval);
PUSH_COPY(rval);
}
ASSERT_VALID_PROPERTY_CACHE_HIT(obj, obj2, entry);
NATIVE_GET(cx, obj, obj2, entry->prop, JSGET_METHOD_BARRIER, &rval);
PUSH_COPY(rval);
TypeScript::Monitor(cx, script, regs.pc, regs.sp[-1]);
@ -3856,7 +3582,7 @@ BEGIN_CASE(JSOP_CALLNAME)
if (!obj->getGeneric(cx, id, &rval))
goto error;
} else {
shape = (Shape *)prop;
Shape *shape = (Shape *)prop;
JSObject *normalized = obj;
if (normalized->getClass() == &WithClass && !shape->hasDefaultGetter())
normalized = js_UnwrapWithObject(cx, normalized);
@ -4232,8 +3958,7 @@ BEGIN_CASE(JSOP_CALLFCSLOT)
uintN index = GET_UINT16(regs.pc);
JSObject *obj = &argv[-2].toObject();
JS_ASSERT(index < obj->getFunctionPrivate()->script()->bindings.countUpvars());
PUSH_COPY(obj->getFlatClosureUpvar(index));
PUSH_COPY(obj->toFunction()->getFlatClosureUpvar(index));
TypeScript::Monitor(cx, script, regs.pc, regs.sp[-1]);
if (op == JSOP_CALLFCSLOT)
PUSH_UNDEFINED();
@ -4324,8 +4049,8 @@ BEGIN_CASE(JSOP_DEFFUN)
* windows, and user-defined JS functions precompiled and then shared among
* requests in server-side JS.
*/
if (obj->getParent() != obj2) {
obj = CloneFunctionObject(cx, fun, obj2, true);
if (obj->toFunction()->environment() != obj2) {
obj = CloneFunctionObjectIfNotSingleton(cx, fun, obj2);
if (!obj)
goto error;
JS_ASSERT_IF(script->hasGlobal(), obj->getProto() == fun->getProto());
@ -4451,7 +4176,7 @@ BEGIN_CASE(JSOP_DEFLOCALFUN)
JSObject *obj = fun;
if (fun->isNullClosure()) {
obj = CloneFunctionObject(cx, fun, &regs.fp()->scopeChain(), true);
obj = CloneFunctionObjectIfNotSingleton(cx, fun, &regs.fp()->scopeChain());
if (!obj)
goto error;
} else {
@ -4460,8 +4185,8 @@ BEGIN_CASE(JSOP_DEFLOCALFUN)
if (!parent)
goto error;
if (obj->getParent() != parent) {
obj = CloneFunctionObject(cx, fun, parent, true);
if (obj->toFunction()->environment() != parent) {
obj = CloneFunctionObjectIfNotSingleton(cx, fun, parent);
if (!obj)
goto error;
}
@ -4517,8 +4242,7 @@ BEGIN_CASE(JSOP_LAMBDA)
JSObject *obj2 = &lref.toObject();
JS_ASSERT(obj2->isObject());
#endif
fun->setMethodAtom(script->getAtom(GET_FULL_INDEX(pc2 - regs.pc)));
JS_ASSERT(fun->methodAtom() == script->getAtom(GET_FULL_INDEX(pc2 - regs.pc)));
break;
}
@ -4529,7 +4253,7 @@ BEGIN_CASE(JSOP_LAMBDA)
#endif
const Value &lref = regs.sp[-1];
if (lref.isObject() && lref.toObject().canHaveMethodBarrier()) {
fun->setMethodAtom(script->getAtom(GET_FULL_INDEX(pc2 - regs.pc)));
JS_ASSERT(fun->methodAtom() == script->getAtom(GET_FULL_INDEX(pc2 - regs.pc)));
break;
}
} else if (op2 == JSOP_CALL) {
@ -4548,11 +4272,10 @@ BEGIN_CASE(JSOP_LAMBDA)
* is the callee for this JSOP_CALL.
*/
const Value &cref = regs.sp[1 - (iargc + 2)];
JSObject *callee;
JSFunction *fun;
if (IsFunctionObject(cref, &callee)) {
JSFunction *calleeFun = callee->getFunctionPrivate();
if (Native native = calleeFun->maybeNative()) {
if (IsFunctionObject(cref, &fun)) {
if (Native native = fun->maybeNative()) {
if ((iargc == 1 && native == array_sort) ||
(iargc == 2 && native == str_replace)) {
break;
@ -4573,7 +4296,7 @@ BEGIN_CASE(JSOP_LAMBDA)
goto error;
}
obj = CloneFunctionObject(cx, fun, parent, true);
obj = CloneFunctionObjectIfNotSingleton(cx, fun, parent);
if (!obj)
goto error;
} while (0);
@ -4734,7 +4457,7 @@ BEGIN_CASE(JSOP_NEWINIT)
if (i == JSProto_Array) {
obj = NewDenseEmptyArray(cx);
} else {
gc::AllocKind kind = GuessObjectGCKind(0, false);
gc::AllocKind kind = GuessObjectGCKind(0);
obj = NewBuiltinClassInstance(cx, &ObjectClass, kind);
}
@ -4806,48 +4529,24 @@ BEGIN_CASE(JSOP_INITMETHOD)
JS_ASSERT(obj->isObject());
/*
* Probe the property cache.
*
* On a hit, if the cached shape has a non-default setter, it must be
* __proto__. If shape->previous() != obj->lastProperty(), there must be a
* repeated property name. The fast path does not handle these two cases.
* Probe the property cache to see if this is a set on an existing property
* added by a NEWOBJECT or a previous INITPROP. If the cached shape has a
* non-default setter, it must be __proto__, so don't handle this.
*/
PropertyCacheEntry *entry;
const Shape *shape;
if (JS_PROPERTY_CACHE(cx).testForInit(rt, regs.pc, obj, &shape, &entry) &&
shape->hasDefaultSetter() &&
shape->previous() == obj->lastProperty())
{
JSObject *obj2;
JSAtom *atom;
if (JS_PROPERTY_CACHE(cx).testForSet(cx, regs.pc, obj, &entry, &obj2, &atom) &&
entry->prop->hasDefaultSetter() &&
entry->vindex == 0) {
JS_ASSERT(obj == obj2);
/* Fast path. Property cache hit. */
uint32 slot = shape->slot;
JS_ASSERT(slot == obj->slotSpan());
JS_ASSERT(slot >= JSSLOT_FREE(obj->getClass()));
if (slot < obj->numSlots()) {
JS_ASSERT(obj->getSlot(slot).isUndefined());
} else {
if (!obj->allocSlot(cx, &slot))
goto error;
JS_ASSERT(slot == shape->slot);
}
/* A new object, or one we just extended in a recent initprop op. */
JS_ASSERT(!obj->lastProperty() ||
obj->shape() == obj->lastProperty()->shapeid);
obj->extend(cx, shape);
/*
* No method change check here because here we are adding a new
* property, not updating an existing slot's value that might
* contain a method of a branded shape.
*/
obj->nativeSetSlotWithType(cx, shape, rval);
obj->nativeSetSlotWithType(cx, entry->prop, rval);
} else {
PCMETER(JS_PROPERTY_CACHE(cx).inipcmisses++);
LOAD_ATOM(0, atom);
/* Get the immediate property name into id. */
JSAtom *atom;
LOAD_ATOM(0, atom);
jsid id = ATOM_TO_JSID(atom);
uintN defineHow = (op == JSOP_INITMETHOD)
@ -5500,13 +5199,13 @@ BEGIN_CASE(JSOP_ENTERBLOCK)
*/
JSObject *obj2 = &regs.fp()->scopeChain();
while (obj2->isWith())
obj2 = obj2->getParent();
obj2 = obj2->internalScopeChain();
if (obj2->isBlock() &&
obj2->getPrivate() == js_FloatingFrameIfGenerator(cx, regs.fp())) {
JSObject *youngestProto = obj2->getProto();
JS_ASSERT(youngestProto->isStaticBlock());
JSObject *parent = obj;
while ((parent = parent->getParent()) != youngestProto)
while ((parent = parent->scopeChain()) != youngestProto)
JS_ASSERT(parent);
}
#endif

Просмотреть файл

@ -348,6 +348,14 @@ Debug_SetValueRangeToCrashOnTouch(Value *vec, size_t len)
#endif
}
static JS_ALWAYS_INLINE void
Debug_SetValueRangeToCrashOnTouch(HeapValue *vec, size_t len)
{
#ifdef DEBUG
Debug_SetValueRangeToCrashOnTouch((Value *) vec, len);
#endif
}
} /* namespace js */
#endif /* jsinterp_h___ */

Просмотреть файл

@ -92,7 +92,6 @@ static JSObject *iterator_iterator(JSContext *cx, JSObject *obj, JSBool keysonly
Class js::IteratorClass = {
"Iterator",
JSCLASS_HAS_PRIVATE |
JSCLASS_CONCURRENT_FINALIZER |
JSCLASS_HAS_CACHED_PROTO(JSProto_Iterator),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
@ -118,6 +117,8 @@ Class js::IteratorClass = {
}
};
static const gc::AllocKind ITERATOR_FINALIZE_KIND = gc::FINALIZE_OBJECT2;
void
NativeIterator::mark(JSTracer *trc)
{
@ -220,8 +221,8 @@ EnumerateNativeProperties(JSContext *cx, JSObject *obj, JSObject *pobj, uintN fl
for (Shape::Range r = pobj->lastProperty()->all(); !r.empty(); r.popFront()) {
const Shape &shape = r.front();
if (!JSID_IS_DEFAULT_XML_NAMESPACE(shape.propid) &&
!Enumerate(cx, obj, pobj, shape.propid, shape.enumerable(), flags, ht, props))
if (!JSID_IS_DEFAULT_XML_NAMESPACE(shape.propid()) &&
!Enumerate(cx, obj, pobj, shape.propid(), shape.enumerable(), flags, ht, props))
{
return false;
}
@ -412,23 +413,21 @@ static inline JSObject *
NewIteratorObject(JSContext *cx, uintN flags)
{
if (flags & JSITER_ENUMERATE) {
/*
* Non-escaping native enumerator objects do not need map, proto, or
* parent. However, code in jstracer.cpp and elsewhere may find such a
* native enumerator object via the stack and (as for all objects that
* are not stillborn, with the exception of "NoSuchMethod" internal
* helper objects) expect it to have a non-null map pointer, so we
* share an empty Enumerator scope in the runtime.
*/
JSObject *obj = js_NewGCObject(cx, FINALIZE_OBJECT0);
types::TypeObject *type = cx->compartment->getEmptyType(cx);
if (!type)
return NULL;
Shape *emptyEnumeratorShape = EmptyShape::getInitialShape(cx, &IteratorClass, NULL, NULL,
ITERATOR_FINALIZE_KIND);
if (!emptyEnumeratorShape)
return NULL;
JSObject *obj = JSObject::create(cx, ITERATOR_FINALIZE_KIND,
emptyEnumeratorShape, type, NULL);
if (!obj)
return NULL;
EmptyShape *emptyEnumeratorShape = EmptyShape::getEmptyEnumeratorShape(cx);
if (!emptyEnumeratorShape)
return NULL;
obj->init(cx, &IteratorClass, &types::emptyTypeObject, NULL, NULL, false);
obj->setMap(emptyEnumeratorShape);
JS_ASSERT(obj->numFixedSlots() == JSObject::ITER_CLASS_NFIXED_SLOTS);
return obj;
}
@ -440,7 +439,7 @@ NativeIterator::allocateIterator(JSContext *cx, uint32 slength, const AutoIdVect
{
size_t plength = props.length();
NativeIterator *ni = (NativeIterator *)
cx->malloc_(sizeof(NativeIterator) + plength * sizeof(jsid) + slength * sizeof(uint32));
cx->malloc_(sizeof(NativeIterator) + plength * sizeof(jsid) + slength * sizeof(Shape *));
if (!ni)
return NULL;
ni->props_array = ni->props_cursor = (HeapId *) (ni + 1);
@ -457,7 +456,7 @@ NativeIterator::init(JSObject *obj, uintN flags, uint32 slength, uint32 key)
{
this->obj.init(obj);
this->flags = flags;
this->shapes_array = (uint32 *) this->props_end;
this->shapes_array = (const Shape **) this->props_end;
this->shapes_length = slength;
this->shapes_key = key;
}
@ -482,7 +481,8 @@ VectorToKeyIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &key
JS_ASSERT(!(flags & JSITER_FOREACH));
if (obj) {
obj->flags |= JSObject::ITERATED;
if (obj->hasSingletonType() && !obj->setIteratedSingleton(cx))
return false;
types::MarkTypeObjectFlags(cx, obj, types::OBJECT_FLAG_ITERATED);
}
@ -506,7 +506,7 @@ VectorToKeyIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &key
JSObject *pobj = obj;
size_t ind = 0;
do {
ni->shapes_array[ind++] = pobj->shape();
ni->shapes_array[ind++] = pobj->lastProperty();
pobj = pobj->getProto();
} while (pobj);
JS_ASSERT(ind == slength);
@ -534,7 +534,8 @@ VectorToValueIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &k
JS_ASSERT(flags & JSITER_FOREACH);
if (obj) {
obj->flags |= JSObject::ITERATED;
if (obj->hasSingletonType() && !obj->setIteratedSingleton(cx))
return false;
types::MarkTypeObjectFlags(cx, obj, types::OBJECT_FLAG_ITERATED);
}
@ -574,7 +575,7 @@ UpdateNativeIterator(NativeIterator *ni, JSObject *obj)
bool
GetIterator(JSContext *cx, JSObject *obj, uintN flags, Value *vp)
{
Vector<uint32, 8> shapes(cx);
Vector<const Shape *, 8> shapes(cx);
uint32 key = 0;
bool keysOnly = (flags == JSITER_ENUMERATE);
@ -603,9 +604,9 @@ GetIterator(JSContext *cx, JSObject *obj, uintN flags, Value *vp)
NativeIterator *lastni = last->getNativeIterator();
if (!(lastni->flags & (JSITER_ACTIVE|JSITER_UNREUSABLE)) &&
obj->isNative() &&
obj->shape() == lastni->shapes_array[0] &&
obj->lastProperty() == lastni->shapes_array[0] &&
proto && proto->isNative() &&
proto->shape() == lastni->shapes_array[1] &&
proto->lastProperty() == lastni->shapes_array[1] &&
!proto->getProto()) {
vp->setObject(*last);
UpdateNativeIterator(lastni, obj);
@ -623,14 +624,15 @@ GetIterator(JSContext *cx, JSObject *obj, uintN flags, Value *vp)
JSObject *pobj = obj;
do {
if (!pobj->isNative() ||
pobj->hasUncacheableProto() ||
obj->getOps()->enumerate ||
pobj->getClass()->enumerate != JS_EnumerateStub) {
shapes.clear();
goto miss;
}
uint32 shape = pobj->shape();
key = (key + (key << 16)) ^ shape;
if (!shapes.append(shape))
const Shape *shape = pobj->lastProperty();
key = (key + (key << 16)) ^ ((jsuword)shape >> 3);
if (!shapes.append((Shape *) shape))
return false;
pobj = pobj->getProto();
} while (pobj);
@ -1210,7 +1212,7 @@ js_NewGenerator(JSContext *cx)
JSObject *proto = global->getOrCreateGeneratorPrototype(cx);
if (!proto)
return NULL;
JSObject *obj = NewNonFunction<WithProto::Given>(cx, &GeneratorClass, proto, global);
JSObject *obj = NewObjectWithGivenProto(cx, &GeneratorClass, proto, global);
if (!obj)
return NULL;

Просмотреть файл

@ -61,11 +61,11 @@
namespace js {
struct NativeIterator {
HeapPtrObject obj;
HeapPtrObject obj;
HeapId *props_array;
HeapId *props_cursor;
HeapId *props_end;
uint32 *shapes_array;
const Shape **shapes_array;
uint32 shapes_length;
uint32 shapes_key;
uint32 flags;
@ -223,16 +223,6 @@ js_LiveFrameIfGenerator(js::StackFrame *fp)
#endif
namespace js {
static inline bool
IsStopIteration(const js::Value &v)
{
return v.isObject() && v.toObject().isStopIteration();
}
} /* namespace js */
extern JSObject *
js_InitIteratorClasses(JSContext *cx, JSObject *obj);

Просмотреть файл

@ -703,7 +703,7 @@ js_IsMathFunction(Native native)
JSObject *
js_InitMathClass(JSContext *cx, JSObject *obj)
{
JSObject *Math = NewNonFunction<WithProto::Class>(cx, &MathClass, NULL, obj);
JSObject *Math = NewObjectWithClassProto(cx, &MathClass, NULL, obj);
if (!Math || !Math->setSingletonType(cx))
return NULL;

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -930,7 +930,7 @@ static JSFunctionSpec json_static_methods[] = {
JSObject *
js_InitJSONClass(JSContext *cx, JSObject *obj)
{
JSObject *JSON = NewNonFunction<WithProto::Class>(cx, &JSONClass, NULL, obj);
JSObject *JSON = NewObjectWithClassProto(cx, &JSONClass, NULL, obj);
if (!JSON || !JSON->setSingletonType(cx))
return NULL;

Просмотреть файл

@ -537,12 +537,12 @@ ToDisassemblySource(JSContext *cx, jsval v, JSAutoByteString *bytes)
while (!r.empty()) {
const Shape &shape = r.front();
JSAutoByteString bytes;
if (!js_AtomToPrintableString(cx, JSID_TO_ATOM(shape.propid), &bytes))
if (!js_AtomToPrintableString(cx, JSID_TO_ATOM(shape.propid()), &bytes))
return false;
r.popFront();
source = JS_sprintf_append(source, "%s: %d%s",
bytes.ptr(), shape.shortid,
bytes.ptr(), shape.shortid(),
!r.empty() ? ", " : "");
if (!source)
return false;
@ -556,8 +556,7 @@ ToDisassemblySource(JSContext *cx, jsval v, JSAutoByteString *bytes)
}
if (clasp == &FunctionClass) {
JSFunction *fun = obj->getFunctionPrivate();
JSString *str = JS_DecompileFunction(cx, fun, JS_DONT_PRETTY_PRINT);
JSString *str = JS_DecompileFunction(cx, obj->toFunction(), JS_DONT_PRETTY_PRINT);
if (!str)
return false;
return bytes->encode(cx, str);
@ -1554,10 +1553,10 @@ GetLocalInSlot(SprintStack *ss, jsint i, jsint slot, JSObject *obj)
for (Shape::Range r(obj->lastProperty()); !r.empty(); r.popFront()) {
const Shape &shape = r.front();
if (shape.shortid == slot) {
LOCAL_ASSERT(JSID_IS_ATOM(shape.propid));
JSAtom *atom = JSID_TO_ATOM(shape.propid);
if (shape.shortid() == slot) {
LOCAL_ASSERT(JSID_IS_ATOM(shape.propid()));
JSAtom *atom = JSID_TO_ATOM(shape.propid());
const char *rval = QuoteString(&ss->sprinter, atom, 0);
if (!rval)
return NULL;
@ -2049,8 +2048,8 @@ GetBlockNames(JSContext *cx, JSObject *blockObj, AtomVector *atoms)
const Shape &shape = r.front();
LOCAL_ASSERT(shape.hasShortID());
--i;
LOCAL_ASSERT((uintN)shape.shortid == i);
(*atoms)[i] = JSID_TO_ATOM(shape.propid);
LOCAL_ASSERT((uintN)shape.shortid() == i);
(*atoms)[i] = JSID_TO_ATOM(shape.propid());
}
LOCAL_ASSERT(i == 0);
@ -2320,7 +2319,7 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb)
uint32 format = cs->format;
if (((fp && pc == fp->pcQuadratic(cx)) ||
(pc == startpc && nuses != 0)) &&
format & (JOF_SET|JOF_DEL|JOF_INCDEC|JOF_FOR|JOF_VARPROP)) {
format & (JOF_SET|JOF_DEL|JOF_INCDEC|JOF_VARPROP)) {
uint32 mode = JOF_MODE(format);
if (mode == JOF_NAME) {
/*

Просмотреть файл

@ -109,7 +109,6 @@ typedef enum JSOp {
#define JOF_INC (2U<<10) /* increment (++, not --) opcode */
#define JOF_INCDEC (3U<<10) /* increment or decrement opcode */
#define JOF_POST (1U<<12) /* postorder increment or decrement */
#define JOF_FOR (1U<<13) /* for-in property op (akin to JOF_SET) */
#define JOF_ASSIGNING JOF_SET /* hint for Class.resolve, used for ops
that do simplex assignment */
#define JOF_DETECTING (1U<<14) /* object detection for JSNewResolveOp */

Просмотреть файл

@ -578,10 +578,8 @@ OPDEF(JSOP_OBJTOP, 222,"objtop", NULL, 3, 0, 0, 0, JOF_UINT16
*/
OPDEF(JSOP_SETMETHOD, 223,"setmethod", NULL, 3, 2, 1, 3, JOF_ATOM|JOF_PROP|JOF_SET|JOF_DETECTING)
OPDEF(JSOP_INITMETHOD, 224,"initmethod", NULL, 3, 2, 1, 3, JOF_ATOM|JOF_PROP|JOF_SET|JOF_DETECTING)
OPDEF(JSOP_UNBRAND, 225,"unbrand", NULL, 1, 1, 1, 0, JOF_BYTE)
OPDEF(JSOP_UNBRANDTHIS, 226,"unbrandthis", NULL, 1, 0, 0, 0, JOF_BYTE)
OPDEF(JSOP_SHARPINIT, 227,"sharpinit", NULL, 3, 0, 0, 0, JOF_UINT16|JOF_SHARPSLOT)
OPDEF(JSOP_SHARPINIT, 225,"sharpinit", NULL, 3, 0, 0, 0, JOF_UINT16|JOF_SHARPSLOT)
/* Pop the stack, convert to a jsid (int or string), and push back. */
OPDEF(JSOP_TOID, 228, "toid", NULL, 1, 1, 1, 0, JOF_BYTE)
OPDEF(JSOP_TOID, 226, "toid", NULL, 1, 1, 1, 0, JOF_BYTE)

Просмотреть файл

@ -137,6 +137,9 @@ bool resizeHeap(JSCompartment *compartment, size_t oldSize, size_t newSize);
*/
bool createObject(JSContext *cx, JSObject *obj);
/* Resize events are being tracked. */
bool objectResizeActive();
/* Object has been resized */
bool resizeObject(JSContext *cx, JSObject *obj, size_t oldSize, size_t newSize);
@ -489,6 +492,17 @@ Probes::finalizeObject(JSObject *obj)
return ok;
}
inline bool
Probes::objectResizeActive()
{
#ifdef MOZ_ETW
if (ProfilingActive)
return true;
#endif
return false;
}
inline bool
Probes::resizeObject(JSContext *cx, JSObject *obj, size_t oldSize, size_t newSize)
{

Просмотреть файл

@ -46,26 +46,17 @@
using namespace js;
JS_STATIC_ASSERT(sizeof(PCVal) == sizeof(jsuword));
JS_REQUIRES_STACK PropertyCacheEntry *
PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, JSObject *pobj,
const Shape *shape, JSBool adding)
const Shape *shape)
{
jsuword kshape, vshape;
JSOp op;
const JSCodeSpec *cs;
PCVal vword;
PropertyCacheEntry *entry;
JS_ASSERT(this == &JS_PROPERTY_CACHE(cx));
JS_ASSERT(!cx->runtime->gcRunning);
if (js_IsPropertyCacheDisabled(cx)) {
PCMETER(disfills++);
return JS_NO_PROP_CACHE_FILL;
}
/*
* Check for fill from js_SetPropertyHelper where the setter removed shape
* from pobj (via unwatch or delete, e.g.).
@ -75,15 +66,6 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, JSObject *po
return JS_NO_PROP_CACHE_FILL;
}
/*
* Dictionary-mode objects have unique shapes, so there is no way to cache
* a prediction of the next shape when adding.
*/
if (adding && obj->inDictionaryMode()) {
PCMETER(add2dictfills++);
return JS_NO_PROP_CACHE_FILL;
}
/*
* Check for overdeep scope and prototype chain. Because resolve, getter,
* and setter hooks can change the prototype chain using JS_SetPrototype
@ -93,16 +75,26 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, JSObject *po
* The scopeIndex can't be wrong. We require JS_SetParent calls to happen
* before any running script might consult a parent-linked scope chain. If
* this requirement is not satisfied, the fill in progress will never hit,
* but vcap vs. scope shape tests ensure nothing malfunctions.
* but scope shape tests ensure nothing malfunctions.
*/
JS_ASSERT_IF(obj == pobj, scopeIndex == 0);
JSObject *tmp = obj;
for (uintN i = 0; i != scopeIndex; i++)
tmp = tmp->getParent();
tmp = tmp->internalScopeChain();
uintN protoIndex = 0;
while (tmp != pobj) {
/*
* Don't cache entries across prototype lookups which can mutate in
* arbitrary ways without a shape change.
*/
if (tmp->hasUncacheableProto()) {
PCMETER(noprotos++);
return JS_NO_PROP_CACHE_FILL;
}
tmp = tmp->getProto();
/*
@ -117,7 +109,7 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, JSObject *po
++protoIndex;
}
if (scopeIndex > PCVCAP_SCOPEMASK || protoIndex > PCVCAP_PROTOMASK) {
if (scopeIndex > PCINDEX_SCOPEMASK || protoIndex > PCINDEX_PROTOMASK) {
PCMETER(longchains++);
return JS_NO_PROP_CACHE_FILL;
}
@ -130,133 +122,9 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, JSObject *po
JSScript *script = cx->stack.currentScript(&pc);
op = js_GetOpcode(cx, script, pc);
cs = &js_CodeSpec[op];
kshape = 0;
do {
/*
* Check for a prototype "plain old method" callee computation. What
* is a plain old method? It's a function-valued property with stub
* getter, so get of a function is idempotent.
*/
if (cs->format & JOF_CALLOP) {
if (shape->isMethod()) {
/*
* A compiler-created function object, AKA a method, already
* memoized in the property tree.
*/
JS_ASSERT(pobj->hasMethodBarrier());
JSObject &funobj = shape->methodObject();
JS_ASSERT(funobj == pobj->nativeGetSlot(shape->slot).toObject());
vword.setFunObj(funobj);
break;
}
/*
* N.B. Objects are not branded if type inference is enabled, to
* allow property accesses without shape checks in JIT code.
*/
if (!pobj->generic() && shape->hasDefaultGetter() && pobj->containsSlot(shape->slot) &&
!cx->typeInferenceEnabled()) {
const Value &v = pobj->nativeGetSlot(shape->slot);
JSObject *funobj;
if (IsFunctionObject(v, &funobj)) {
/*
* Great, we have a function-valued prototype property
* where the getter is JS_PropertyStub. The type id in
* pobj does not evolve with changes to property values,
* however.
*
* So here, on first cache fill for this method, we brand
* obj with a new shape and set the JSObject::BRANDED flag.
* Once this flag is set, any property assignment that
* changes the value from or to a different function object
* will result in shape being regenerated.
*/
if (!pobj->branded()) {
PCMETER(brandfills++);
#ifdef DEBUG_notme
JSFunction *fun = JSVAL_TO_OBJECT(v)->getFunctionPrivate();
JSAutoByteString funNameBytes;
if (const char *funName = GetFunctionNameBytes(cx, fun, &funNameBytes)) {
fprintf(stderr,
"branding %p (%s) for funobj %p (%s), shape %lu\n",
pobj, pobj->getClass()->name, JSVAL_TO_OBJECT(v), funName,
obj->shape());
}
#endif
if (!pobj->brand(cx))
return JS_NO_PROP_CACHE_FILL;
}
vword.setFunObj(*funobj);
break;
}
}
} else if ((cs->format & (JOF_SET | JOF_FOR | JOF_INCDEC)) && obj->watched()) {
return JS_NO_PROP_CACHE_FILL;
}
/*
* If getting a value via a stub getter, or doing an INCDEC op
* with stub getters and setters, we can cache the slot.
*/
if (!(cs->format & (JOF_SET | JOF_FOR)) &&
(!(cs->format & JOF_INCDEC) || (shape->hasDefaultSetter() && shape->writable())) &&
shape->hasDefaultGetter() &&
pobj->containsSlot(shape->slot)) {
/* Great, let's cache shape's slot and use it on cache hit. */
vword.setSlot(shape->slot);
} else {
/* Best we can do is to cache shape (still a nice speedup). */
vword.setShape(shape);
if (adding &&
pobj->shape() == shape->shapeid) {
/*
* Our caller added a new property. We also know that a setter
* that js_NativeSet might have run has not mutated pobj, so
* the added property is still the last one added, and pobj is
* not branded.
*
* We want to cache under pobj's shape before the property
* addition to bias for the case when the mutator opcode
* always adds the same property. This allows us to optimize
* periodic execution of object initializers or other explicit
* initialization sequences such as
*
* obj = {}; obj.x = 1; obj.y = 2;
*
* We assume that on average the win from this optimization is
* greater than the cost of an extra mismatch per loop owing to
* the bias for the following case:
*
* obj = {}; ... for (...) { ... obj.x = ... }
*
* On the first iteration of such a for loop, JSOP_SETPROP
* fills the cache with the shape of the newly created object
* obj, not the shape of obj after obj.x has been assigned.
* That mismatches obj's shape on the second iteration. Note
* that on the third and subsequent iterations the cache will
* be hit because the shape is no longer updated.
*/
JS_ASSERT(shape == pobj->lastProperty());
JS_ASSERT(!pobj->nativeEmpty());
kshape = shape->previous()->shapeid;
/*
* When adding we predict no prototype object will later gain a
* readonly property or setter.
*/
vshape = cx->runtime->protoHazardShape;
}
}
} while (0);
if (kshape == 0) {
kshape = obj->shape();
vshape = pobj->shape();
}
JS_ASSERT(kshape < SHAPE_OVERFLOW_BIT);
if ((cs->format & JOF_SET) && obj->watched())
return JS_NO_PROP_CACHE_FILL;
if (obj == pobj) {
JS_ASSERT(scopeIndex == 0 && protoIndex == 0);
@ -272,20 +140,15 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, JSObject *po
/*
* Make sure that a later shadowing assignment will enter
* PurgeProtoChain and invalidate this entry, bug 479198.
*
* This is not thread-safe but we are about to make all objects
* except multi-threaded wrappers (bug 566951) single-threaded.
* And multi-threaded wrappers are non-native Proxy instances, so
* they won't use the property cache.
*/
obj->setDelegate();
if (!obj->isDelegate())
return JS_NO_PROP_CACHE_FILL;
}
}
JS_ASSERT(vshape < SHAPE_OVERFLOW_BIT);
entry = &table[hash(pc, kshape)];
entry = &table[hash(pc, obj->lastProperty())];
PCMETER(entry->vword.isNull() || recycles++);
entry->assign(pc, kshape, vshape, scopeIndex, protoIndex, vword);
entry->assign(pc, obj->lastProperty(), pobj->lastProperty(), shape, scopeIndex, protoIndex);
empty = false;
PCMETER(fills++);
@ -322,8 +185,6 @@ PropertyCache::fullTest(JSContext *cx, jsbytecode *pc, JSObject **objp, JSObject
PropertyCacheEntry *entry)
{
JSObject *obj, *pobj, *tmp;
uint32 vcap;
JSScript *script = cx->stack.currentScript();
JS_ASSERT(this == &JS_PROPERTY_CACHE(cx));
@ -333,25 +194,24 @@ PropertyCache::fullTest(JSContext *cx, jsbytecode *pc, JSObject **objp, JSObject
const JSCodeSpec &cs = js_CodeSpec[op];
obj = *objp;
vcap = entry->vcap;
uint32 vindex = entry->vindex;
if (entry->kpc != pc) {
PCMETER(kpcmisses++);
JSAtom *atom = GetAtomFromBytecode(cx, pc, op, cs);
#ifdef DEBUG_notme
JSScript *script = cx->fp()->getScript();
JSAutoByteString printable;
fprintf(stderr,
"id miss for %s from %s:%u"
" (pc %u, kpc %u, kshape %u, shape %u)\n",
" (pc %u, kpc %u, kshape %p, shape %p)\n",
js_AtomToPrintableString(cx, atom, &printable),
script->filename,
js_PCToLineNumber(cx, script, pc),
pc - script->code,
entry->kpc - script->code,
entry->kshape,
obj->shape());
obj->lastProperty());
js_Disassemble1(cx, script, pc,
pc - script->code,
JS_FALSE, stderr);
@ -360,39 +220,38 @@ PropertyCache::fullTest(JSContext *cx, jsbytecode *pc, JSObject **objp, JSObject
return atom;
}
if (entry->kshape != obj->shape()) {
if (entry->kshape != obj->lastProperty()) {
PCMETER(kshapemisses++);
return GetAtomFromBytecode(cx, pc, op, cs);
}
/*
* PropertyCache::test handles only the direct and immediate-prototype hit
* cases. All others go here. We could embed the target object in the cache
* entry but then entry size would be 5 words. Instead we traverse chains.
* cases. All others go here.
*/
pobj = obj;
if (JOF_MODE(cs.format) == JOF_NAME) {
while (vcap & (PCVCAP_SCOPEMASK << PCVCAP_PROTOBITS)) {
tmp = pobj->getParent();
while (vindex & (PCINDEX_SCOPEMASK << PCINDEX_PROTOBITS)) {
tmp = pobj->scopeChain();
if (!tmp || !tmp->isNative())
break;
pobj = tmp;
vcap -= PCVCAP_PROTOSIZE;
vindex -= PCINDEX_PROTOSIZE;
}
*objp = pobj;
}
while (vcap & PCVCAP_PROTOMASK) {
while (vindex & PCINDEX_PROTOMASK) {
tmp = pobj->getProto();
if (!tmp || !tmp->isNative())
break;
pobj = tmp;
--vcap;
--vindex;
}
if (matchShape(cx, pobj, vcap >> PCVCAP_TAGBITS)) {
if (pobj->lastProperty() == entry->pshape) {
#ifdef DEBUG
JSAtom *atom = GetAtomFromBytecode(cx, pc, op, cs);
jsid id = ATOM_TO_JSID(atom);
@ -416,8 +275,9 @@ PropertyCache::assertEmpty()
for (uintN i = 0; i < SIZE; i++) {
JS_ASSERT(!table[i].kpc);
JS_ASSERT(!table[i].kshape);
JS_ASSERT(!table[i].vcap);
JS_ASSERT(table[i].vword.isNull());
JS_ASSERT(!table[i].pshape);
JS_ASSERT(!table[i].prop);
JS_ASSERT(!table[i].vindex);
}
}
#endif
@ -431,7 +291,6 @@ PropertyCache::purge(JSContext *cx)
}
PodArrayZero(table);
JS_ASSERT(table[0].vword.isNull());
empty = true;
#ifdef JS_PROPERTY_CACHE_METERING
@ -491,22 +350,6 @@ PropertyCache::purge(JSContext *cx)
PCMETER(flushes++);
}
void
PropertyCache::purgeForScript(JSContext *cx, JSScript *script)
{
JS_ASSERT(!cx->runtime->gcRunning);
for (PropertyCacheEntry *entry = table; entry < table + SIZE; entry++) {
if (UnsignedPtrDiff(entry->kpc, script->code) < script->length) {
entry->kpc = NULL;
#ifdef DEBUG
entry->kshape = entry->vcap = 0;
entry->vword.setNull();
#endif
}
}
}
void
PropertyCache::restore(PropertyCacheEntry *entry)
{

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше