diff --git a/accessible/src/atk/nsMaiInterfaceDocument.cpp b/accessible/src/atk/nsMaiInterfaceDocument.cpp
index 0148d8076d18..4f3f07ff4540 100644
--- a/accessible/src/atk/nsMaiInterfaceDocument.cpp
+++ b/accessible/src/atk/nsMaiInterfaceDocument.cpp
@@ -81,24 +81,6 @@ getDocumentLocaleCB(AtkDocument *aDocument)
     return nsAccessibleWrap::ReturnString(locale);
 }
 
-const gchar *
-getDocumentTypeCB(AtkDocument *aDocument)
-{
-    nsAccessibleWrap *accWrap = GetAccessibleWrap(ATK_OBJECT(aDocument));
-    if (!accWrap)
-        return nsnull;
-
-    nsCOMPtr<nsIAccessibleDocument> accDocument;
-    accWrap->QueryInterface(NS_GET_IID(nsIAccessibleDocument),
-                            getter_AddRefs(accDocument));
-    NS_ENSURE_TRUE(accDocument, nsnull);
-
-    nsAutoString mimeType;
-    nsresult rv = accDocument->GetMimeType(mimeType);
-    NS_ENSURE_SUCCESS(rv, nsnull);
-    return nsAccessibleWrap::ReturnString(mimeType);
-}
-
 static inline GSList *
 prependToList(GSList *aList, const char *const aName, const nsAutoString &aValue)
 {
diff --git a/accessible/src/base/nsCoreUtils.cpp b/accessible/src/base/nsCoreUtils.cpp
index cdf887d92038..b9dde56628a9 100644
--- a/accessible/src/base/nsCoreUtils.cpp
+++ b/accessible/src/base/nsCoreUtils.cpp
@@ -660,22 +660,6 @@ nsCoreUtils::GetFirstSensibleColumn(nsITreeBoxObject *aTree)
   return column.forget();
 }
 
-already_AddRefed<nsITreeColumn>
-nsCoreUtils::GetLastSensibleColumn(nsITreeBoxObject *aTree)
-{
-  nsCOMPtr<nsITreeColumns> cols;
-  aTree->GetColumns(getter_AddRefs(cols));
-  if (!cols)
-    return nsnull;
-
-  nsCOMPtr<nsITreeColumn> column;
-  cols->GetLastColumn(getter_AddRefs(column));
-  if (column && IsColumnHidden(column))
-    return GetPreviousSensibleColumn(column);
-
-  return column.forget();
-}
-
 PRUint32
 nsCoreUtils::GetSensibleColumnCount(nsITreeBoxObject *aTree)
 {
diff --git a/accessible/src/base/nsCoreUtils.h b/accessible/src/base/nsCoreUtils.h
index 7321f4fdf7a4..5036077d29e8 100644
--- a/accessible/src/base/nsCoreUtils.h
+++ b/accessible/src/base/nsCoreUtils.h
@@ -327,12 +327,6 @@ public:
   static already_AddRefed<nsITreeColumn>
   GetFirstSensibleColumn(nsITreeBoxObject *aTree);
 
-  /**
-   * Return last sensible column for the given tree box object.
-   */
-  static already_AddRefed<nsITreeColumn>
-  GetLastSensibleColumn(nsITreeBoxObject *aTree);
-
   /**
    * Return sensible columns count for the given tree box object.
    */
diff --git a/allmakefiles.sh b/allmakefiles.sh
index 682ff13710c3..fb29d86a2c09 100755
--- a/allmakefiles.sh
+++ b/allmakefiles.sh
@@ -86,6 +86,7 @@ if [ ! "$LIBXUL_SDK" ]; then
   if [ "$MOZ_WIDGET_TOOLKIT" = "android" ]; then
     add_makefiles "
       other-licenses/android/Makefile
+      other-licenses/skia-npapi/Makefile
     "
   fi
 fi
@@ -115,6 +116,9 @@ if [ "$COMPILER_DEPEND" = "" -a "$MOZ_NATIVE_MAKEDEPEND" = "" ]; then
 fi
 
 if [ "$ENABLE_TESTS" ]; then
+  add_makefiles "
+    build/autoconf/test/Makefile
+  "
   if [ "$_MSC_VER" -a "$OS_TEST" != "x86_64" ]; then
     add_makefiles "
       build/win32/vmwarerecordinghelper/Makefile
diff --git a/browser/base/content/browser.js b/browser/base/content/browser.js
index 67bfdacef252..6107e1563f22 100644
--- a/browser/base/content/browser.js
+++ b/browser/base/content/browser.js
@@ -3478,10 +3478,29 @@ const BrowserSearch = {
     if (!submission)
       return;
 
+    let newTab;
+    function newTabHandler(event) {
+      newTab = event.target;
+    }
+    gBrowser.tabContainer.addEventListener("TabOpen", newTabHandler);
+
     openLinkIn(submission.uri.spec,
                useNewTab ? "tab" : "current",
                { postData: submission.postData,
                  relatedToCurrent: true });
+
+    gBrowser.tabContainer.removeEventListener("TabOpen", newTabHandler);
+    if (newTab && !newTab.selected) {
+      let tabSelected = false;
+      function tabSelectHandler() {
+        tabSelected = true;
+      }
+      newTab.addEventListener("TabSelect", tabSelectHandler);
+      setTimeout(function () {
+        newTab.removeEventListener("TabSelect", tabSelectHandler);
+        Services.telemetry.getHistogramById("FX_CONTEXT_SEARCH_AND_TAB_SELECT").add(tabSelected);
+      }, 5000);
+    }
   },
 
   /**
@@ -7659,7 +7678,8 @@ var FeedHandler = {
     if (browserForLink == gBrowser.selectedBrowser) {
       // Batch updates to avoid updating the UI for multiple onLinkAdded events
       // fired within 100ms of each other.
-      clearTimeout(this._updateFeedTimeout);
+      if (this._updateFeedTimeout)
+        clearTimeout(this._updateFeedTimeout);
       this._updateFeedTimeout = setTimeout(this.updateFeeds.bind(this), 100);
     }
   }
diff --git a/browser/base/content/pageinfo/pageInfo.js b/browser/base/content/pageinfo/pageInfo.js
index f4a5abaf0603..30c7f5accd53 100644
--- a/browser/base/content/pageinfo/pageInfo.js
+++ b/browser/base/content/pageinfo/pageInfo.js
@@ -992,7 +992,7 @@ function makePreview(row)
   }
 #endif
   else {
-    // fallback image for protocols not allowed (e.g., data: or javascript:)
+    // fallback image for protocols not allowed (e.g., javascript:)
     // or elements not [yet] handled (e.g., object, embed).
     document.getElementById("brokenimagecontainer").collapsed = false;
     document.getElementById("theimagecontainer").collapsed = true;
@@ -1228,8 +1228,6 @@ function selectImage()
 function checkProtocol(img)
 {
   var url = img[COL_IMAGE_ADDRESS];
-  if (/^data:/.test(url) && /^image\//.test(img[COL_IMAGE_NODE].type))
-    return true;
-  const regex = /^(https?|ftp|file|about|chrome|resource):/;
-  return regex.test(url);
+  return /^data:image\//i.test(url) ||
+         /^(https?|ftp|file|about|chrome|resource):/.test(url);
 }
diff --git a/browser/makefiles.sh b/browser/makefiles.sh
index 1ce95fb0a151..75e3ed537041 100644
--- a/browser/makefiles.sh
+++ b/browser/makefiles.sh
@@ -69,8 +69,10 @@ browser/components/shell/src/Makefile
 browser/components/tabview/Makefile
 browser/devtools/Makefile
 browser/devtools/highlighter/Makefile
+browser/devtools/scratchpad/Makefile
 browser/devtools/shared/Makefile
 browser/devtools/sourceeditor/Makefile
+browser/devtools/styleeditor/Makefile
 browser/devtools/styleinspector/Makefile
 browser/devtools/webconsole/Makefile
 browser/fuel/Makefile
@@ -140,6 +142,7 @@ if [ "$ENABLE_TESTS" ]; then
   browser/devtools/scratchpad/test/Makefile
   browser/devtools/shared/test/Makefile
   browser/devtools/sourceeditor/test/Makefile
+  browser/devtools/styleeditor/test/Makefile
   browser/devtools/styleinspector/test/browser/Makefile
   browser/devtools/webconsole/test/browser/Makefile
   browser/fuel/test/Makefile
diff --git a/build/pgo/server-locations.txt b/build/pgo/server-locations.txt
index 07da845042fb..3f2a94e19bb3 100644
--- a/build/pgo/server-locations.txt
+++ b/build/pgo/server-locations.txt
@@ -188,3 +188,7 @@ https://www.bank2.com:443 privileged,cert=escapeattack2
 #
 https://redirproxy.example.com:443 privileged,redir=test1.example.com
 
+# Host used for IndexedDB Quota testing
+http://bug704464-1.example.com:80 privileged
+http://bug704464-2.example.com:80 privileged
+http://bug704464-3.example.com:80 privileged
diff --git a/caps/src/nsScriptSecurityManager.cpp b/caps/src/nsScriptSecurityManager.cpp
index 496276acd4c5..9e5c2b844026 100644
--- a/caps/src/nsScriptSecurityManager.cpp
+++
b/caps/src/nsScriptSecurityManager.cpp
@@ -1738,7 +1738,7 @@ nsScriptSecurityManager::CheckFunctionAccess(JSContext *aCx, void *aFunObj,
 #ifdef DEBUG
     {
         JS_ASSERT(JS_ObjectIsFunction(aCx, (JSObject *)aFunObj));
-        JSFunction *fun = (JSFunction *)JS_GetPrivate(aCx, (JSObject *)aFunObj);
+        JSFunction *fun = JS_GetObjectFunction((JSObject *)aFunObj);
         JSScript *script = JS_GetFunctionScript(aCx, fun);
 
         NS_ASSERTION(!script, "Null principal for non-native function!");
@@ -2219,7 +2219,7 @@ nsScriptSecurityManager::GetFunctionObjectPrincipal(JSContext *cx,
         return result;
     }
 
-    JSFunction *fun = (JSFunction *)JS_GetPrivate(cx, obj);
+    JSFunction *fun = JS_GetObjectFunction(obj);
     JSScript *script = JS_GetFunctionScript(cx, fun);
 
     if (!script)
@@ -2243,7 +2243,7 @@ nsScriptSecurityManager::GetFunctionObjectPrincipal(JSContext *cx,
 
         script = frameScript;
     }
-    else if (JS_GetFunctionObject(fun) != obj)
+    else if (!js::IsOriginalScriptFunction(fun))
     {
         // Here, obj is a cloned function object. In this case, the
         // clone's prototype may have been precompiled from brutally
@@ -2285,7 +2285,7 @@ nsScriptSecurityManager::GetFramePrincipal(JSContext *cx,
 #ifdef DEBUG
     if (NS_SUCCEEDED(*rv) && !result)
     {
-        JSFunction *fun = (JSFunction *)JS_GetPrivate(cx, obj);
+        JSFunction *fun = JS_GetObjectFunction(obj);
         JSScript *script = JS_GetFunctionScript(cx, fun);
 
         NS_ASSERTION(!script, "Null principal for non-native function!");
@@ -2432,7 +2432,7 @@ nsScriptSecurityManager::doGetObjectPrincipal(JSObject *aObj
     jsClass = js::GetObjectClass(aObj);
 
     if (jsClass == &js::CallClass) {
-        aObj = js::GetObjectParent(aObj);
+        aObj = js::GetObjectParentMaybeScope(aObj);
 
         if (!aObj)
             return nsnull;
@@ -2484,7 +2484,7 @@ nsScriptSecurityManager::doGetObjectPrincipal(JSObject *aObj
         }
     }
 
-    aObj = js::GetObjectParent(aObj);
+    aObj = js::GetObjectParentMaybeScope(aObj);
 
     if (!aObj)
         break;
diff --git a/content/base/src/nsAttrAndChildArray.cpp b/content/base/src/nsAttrAndChildArray.cpp
index bcb3b51d4917..ed6e7f1f0d6f 100644
--- a/content/base/src/nsAttrAndChildArray.cpp
+++ b/content/base/src/nsAttrAndChildArray.cpp
@@ -366,31 +366,6 @@ nsAttrAndChildArray::AttrAt(PRUint32 aPos) const
   return &ATTRS(mImpl)[aPos - mapped].mValue;
 }
 
-nsresult
-nsAttrAndChildArray::SetAttr(nsIAtom* aLocalName, const nsAString& aValue)
-{
-  PRUint32 i, slotCount = AttrSlotCount();
-  for (i = 0; i < slotCount && AttrSlotIsTaken(i); ++i) {
-    if (ATTRS(mImpl)[i].mName.Equals(aLocalName)) {
-      ATTRS(mImpl)[i].mValue.SetTo(aValue);
-
-      return NS_OK;
-    }
-  }
-
-  NS_ENSURE_TRUE(slotCount < ATTRCHILD_ARRAY_MAX_ATTR_COUNT,
-                 NS_ERROR_FAILURE);
-
-  if (i == slotCount && !AddAttrSlot()) {
-    return NS_ERROR_OUT_OF_MEMORY;
-  }
-
-  new (&ATTRS(mImpl)[i].mName) nsAttrName(aLocalName);
-  new (&ATTRS(mImpl)[i].mValue) nsAttrValue(aValue);
-
-  return NS_OK;
-}
-
 nsresult
 nsAttrAndChildArray::SetAndTakeAttr(nsIAtom* aLocalName, nsAttrValue& aValue)
 {
diff --git a/content/base/src/nsAttrAndChildArray.h b/content/base/src/nsAttrAndChildArray.h
index 7203ba476745..983d87e28025 100644
--- a/content/base/src/nsAttrAndChildArray.h
+++ b/content/base/src/nsAttrAndChildArray.h
@@ -103,7 +103,6 @@ public:
   PRUint32 AttrCount() const;
   const nsAttrValue* GetAttr(nsIAtom* aLocalName, PRInt32 aNamespaceID = kNameSpaceID_None) const;
   const nsAttrValue* AttrAt(PRUint32 aPos) const;
-  nsresult SetAttr(nsIAtom* aLocalName, const nsAString& aValue);
   nsresult SetAndTakeAttr(nsIAtom* aLocalName, nsAttrValue& aValue);
   nsresult SetAndTakeAttr(nsINodeInfo* aName, nsAttrValue& aValue);
 
diff --git
a/content/base/src/nsGkAtomList.h b/content/base/src/nsGkAtomList.h
index 52f47f4d391c..46e1ce98597e 100644
--- a/content/base/src/nsGkAtomList.h
+++ b/content/base/src/nsGkAtomList.h
@@ -594,6 +594,9 @@ GK_ATOM(x_moz_errormessage, "x-moz-errormessage")
 GK_ATOM(msthemecompatible, "msthemecompatible")
 GK_ATOM(multicol, "multicol")
 GK_ATOM(multiple, "multiple")
+#ifdef MOZ_MEDIA
+GK_ATOM(muted, "muted")
+#endif
 GK_ATOM(name, "name")
 GK_ATOM(_namespace, "namespace")
 GK_ATOM(namespaceAlias, "namespace-alias")
diff --git a/content/base/src/nsStyleLinkElement.cpp b/content/base/src/nsStyleLinkElement.cpp
index 5e9585647731..9cb62d9cfd4d 100644
--- a/content/base/src/nsStyleLinkElement.cpp
+++ b/content/base/src/nsStyleLinkElement.cpp
@@ -179,11 +179,11 @@ PRUint32 nsStyleLinkElement::ParseLinkTypes(const nsAString& aTypes)
     return linkMask;
 
   nsAString::const_iterator current(start);
-  bool inString = !nsCRT::IsAsciiSpace(*current);
+  bool inString = !nsContentUtils::IsHTMLWhitespace(*current);
 
   nsAutoString subString;
   while (current != done) {
-    if (nsCRT::IsAsciiSpace(*current)) {
+    if (nsContentUtils::IsHTMLWhitespace(*current)) {
       if (inString) {
         ToLowerCase(Substring(start, current), subString);
         linkMask |= ToLinkMask(subString);
diff --git a/content/base/src/nsTreeSanitizer.cpp b/content/base/src/nsTreeSanitizer.cpp
index f09b17af9fe4..01bd513d2be1 100644
--- a/content/base/src/nsTreeSanitizer.cpp
+++ b/content/base/src/nsTreeSanitizer.cpp
@@ -253,6 +253,9 @@ nsIAtom** const kAttributesHTML[] = {
   &nsGkAtoms::min,
   &nsGkAtoms::mozdonotsend,
   &nsGkAtoms::multiple,
+#ifdef MOZ_MEDIA
+  &nsGkAtoms::muted,
+#endif
   &nsGkAtoms::name,
   &nsGkAtoms::nohref,
   &nsGkAtoms::noshade,
diff --git a/content/base/src/nsXHTMLContentSerializer.cpp b/content/base/src/nsXHTMLContentSerializer.cpp
index 7209f2938d5c..76b4b83fe4ea 100644
--- a/content/base/src/nsXHTMLContentSerializer.cpp
+++ b/content/base/src/nsXHTMLContentSerializer.cpp
@@ -743,7 +743,7 @@ nsXHTMLContentSerializer::IsShorthandAttr(const nsIAtom* aAttrName,
 #ifdef MOZ_MEDIA
   // autoplay and controls
   if ((aElementName == nsGkAtoms::video || aElementName == nsGkAtoms::audio) &&
-      (aAttrName == nsGkAtoms::autoplay ||
+      (aAttrName == nsGkAtoms::autoplay || aAttrName == nsGkAtoms::muted ||
        aAttrName == nsGkAtoms::controls)) {
     return true;
   }
diff --git a/content/base/src/nsXMLContentSerializer.cpp b/content/base/src/nsXMLContentSerializer.cpp
index 4d040727e28f..fefc74623990 100644
--- a/content/base/src/nsXMLContentSerializer.cpp
+++ b/content/base/src/nsXMLContentSerializer.cpp
@@ -1152,21 +1152,6 @@ nsXMLContentSerializer::CheckElementEnd(nsIContent * aContent,
   return aContent->GetChildCount() > 0;
 }
 
-void
-nsXMLContentSerializer::AppendToString(const PRUnichar* aStr,
-                                       PRInt32 aLength,
-                                       nsAString& aOutputStr)
-{
-  if (mBodyOnly && !mInBody) {
-    return;
-  }
-  PRInt32 length = (aLength == -1) ?
nsCRT::strlen(aStr) : aLength; - - mColPos += length; - - aOutputStr.Append(aStr, length); -} - void nsXMLContentSerializer::AppendToString(const PRUnichar aChar, nsAString& aOutputStr) diff --git a/content/base/src/nsXMLContentSerializer.h b/content/base/src/nsXMLContentSerializer.h index b9cdbd0f78c6..69c9b0039dce 100644 --- a/content/base/src/nsXMLContentSerializer.h +++ b/content/base/src/nsXMLContentSerializer.h @@ -102,13 +102,6 @@ class nsXMLContentSerializer : public nsIContentSerializer { protected: - /** - * Appends a PRUnichar string and increments the column position - */ - void AppendToString(const PRUnichar* aStr, - PRInt32 aLength, - nsAString& aOutputStr); - /** * Appends a PRUnichar character and increments the column position */ diff --git a/content/canvas/src/CanvasUtils.cpp b/content/canvas/src/CanvasUtils.cpp index 2f822eb16d9a..4f8dfae17819 100644 --- a/content/canvas/src/CanvasUtils.cpp +++ b/content/canvas/src/CanvasUtils.cpp @@ -101,34 +101,6 @@ DoDrawImageSecurityCheck(nsHTMLCanvasElement *aCanvasElement, aCanvasElement->SetWriteOnly(); } -void -LogMessage (const nsCString& errorString) -{ - nsCOMPtr console(do_GetService(NS_CONSOLESERVICE_CONTRACTID)); - if (!console) - return; - - console->LogStringMessage(NS_ConvertUTF8toUTF16(errorString).get()); - fprintf(stderr, "%s\n", errorString.get()); -} - -void -LogMessagef (const char *fmt, ...) -{ - va_list ap; - va_start(ap, fmt); - char buf[256]; - - nsCOMPtr console(do_GetService(NS_CONSOLESERVICE_CONTRACTID)); - if (console) { - PR_vsnprintf(buf, 256, fmt, ap); - console->LogStringMessage(NS_ConvertUTF8toUTF16(nsDependentCString(buf)).get()); - fprintf(stderr, "%s\n", buf); - } - - va_end(ap); -} - bool CoerceDouble(jsval v, double* d) { diff --git a/content/canvas/src/CanvasUtils.h b/content/canvas/src/CanvasUtils.h index 36186dd8749c..e88363978f06 100644 --- a/content/canvas/src/CanvasUtils.h +++ b/content/canvas/src/CanvasUtils.h @@ -77,9 +77,6 @@ void DoDrawImageSecurityCheck(nsHTMLCanvasElement *aCanvasElement, bool forceWriteOnly, bool CORSUsed); -void LogMessage (const nsCString& errorString); -void LogMessagef (const char *fmt, ...); - // Make a double out of |v|, treating undefined values as 0.0 (for // the sake of sparse arrays). Return true iff coercion // succeeded. diff --git a/content/canvas/src/WebGLContext.cpp b/content/canvas/src/WebGLContext.cpp index c58c3e0c95f3..10a3e5a4a750 100644 --- a/content/canvas/src/WebGLContext.cpp +++ b/content/canvas/src/WebGLContext.cpp @@ -1162,9 +1162,21 @@ WebGLContext::MaybeRestoreContext() if (mContextLost || mAllowRestore) return; - gl->MakeCurrent(); - GLContext::ContextResetARB resetStatus = - (GLContext::ContextResetARB) gl->fGetGraphicsResetStatus(); + GLContext::ContextResetARB resetStatus = GLContext::CONTEXT_NO_ERROR; + if (mHasRobustness) { + gl->MakeCurrent(); + resetStatus = (GLContext::ContextResetARB) gl->fGetGraphicsResetStatus(); + // This call is safe as it does not actually interact with GL, so the + // context does not have to be current. + } else if (gl->GetContextType() == GLContext::ContextTypeEGL) { + // Simulate a ARB_robustness guilty context loss for when we + // get an EGL_CONTEXT_LOST error. It may not actually be guilty, + // but we can't make any distinction, so we must assume the worst + // case. 
+ if (!gl->MakeCurrent(true) && gl->IsContextLost()) { + resetStatus = GLContext::CONTEXT_GUILTY_CONTEXT_RESET_ARB; + } + } if (resetStatus != GLContext::CONTEXT_NO_ERROR) { // It's already lost, but clean up after it and signal to JS that it is diff --git a/content/canvas/src/WebGLContext.h b/content/canvas/src/WebGLContext.h index 2372785a1420..549528471ba7 100644 --- a/content/canvas/src/WebGLContext.h +++ b/content/canvas/src/WebGLContext.h @@ -57,6 +57,7 @@ #include "nsIDOMHTMLElement.h" #include "nsIJSNativeInitializer.h" #include "nsIMemoryReporter.h" +#include "nsContentUtils.h" #include "GLContextProvider.h" #include "Layers.h" @@ -449,7 +450,7 @@ public: // Sets up the GL_ARB_robustness timer if it isn't already, so that if the // driver gets restarted, the context may get reset with it. void SetupRobustnessTimer() { - if (mContextLost || !mHasRobustness) + if (mContextLost || (!mHasRobustness && gl->GetContextType() != gl::GLContext::ContextTypeEGL)) return; // If the timer was already running, don't restart it here. Instead, diff --git a/content/canvas/src/WebGLContextGL.cpp b/content/canvas/src/WebGLContextGL.cpp index d13ccb63c648..a82fb379f7e9 100644 --- a/content/canvas/src/WebGLContextGL.cpp +++ b/content/canvas/src/WebGLContextGL.cpp @@ -3993,7 +3993,10 @@ nsresult WebGLContext::DOMElementToImageSurface(nsIDOMElement *imageOrCanvas, gfxImageSurface **imageOut, int *format) { - gfxImageSurface *surf = nsnull; + nsCOMPtr content = do_QueryInterface(imageOrCanvas); + if (!content) { + return NS_ERROR_FAILURE; + } PRUint32 flags = nsLayoutUtils::SFE_WANT_NEW_SURFACE | @@ -4005,7 +4008,7 @@ WebGLContext::DOMElementToImageSurface(nsIDOMElement *imageOrCanvas, flags |= nsLayoutUtils::SFE_NO_PREMULTIPLY_ALPHA; nsLayoutUtils::SurfaceFromElementResult res = - nsLayoutUtils::SurfaceFromElement(imageOrCanvas, flags); + nsLayoutUtils::SurfaceFromElement(content->AsElement(), flags); if (!res.mSurface) return NS_ERROR_FAILURE; if (res.mSurface->GetType() != gfxASurface::SurfaceTypeImage) { @@ -4034,11 +4037,10 @@ WebGLContext::DOMElementToImageSurface(nsIDOMElement *imageOrCanvas, } } - // part 2: if the DOM element is a canvas, check that it's not write-only. That would indicate a tainted canvas, - // i.e. a canvas that could contain cross-domain image data. - nsCOMPtr maybeDOMCanvas = do_QueryInterface(imageOrCanvas); - if (maybeDOMCanvas && maybeDOMCanvas->IsHTML(nsGkAtoms::canvas)) { - nsHTMLCanvasElement *canvas = static_cast(maybeDOMCanvas.get()); + // part 2: if the DOM element is a canvas, check that it's not write-only. + // That would indicate a tainted canvas, i.e. a canvas that could contain + // cross-domain image data. + if (nsHTMLCanvasElement* canvas = nsHTMLCanvasElement::FromContent(content)) { if (canvas->IsWriteOnly()) { LogMessageIfVerbose("The canvas used as source for texImage2D here is tainted (write-only). It is forbidden " "to load a WebGL texture from a tainted canvas. A Canvas becomes tainted for example " @@ -4052,7 +4054,7 @@ WebGLContext::DOMElementToImageSurface(nsIDOMElement *imageOrCanvas, // Notice that there is never a need to mark the WebGL canvas as write-only, since we reject write-only/cross-domain // texture sources in the first place. 
- surf = static_cast(res.mSurface.get()); + gfxImageSurface* surf = static_cast(res.mSurface.get()); res.mSurface.forget(); *imageOut = surf; diff --git a/content/canvas/src/nsCanvasRenderingContext2D.cpp b/content/canvas/src/nsCanvasRenderingContext2D.cpp index f28c7cfbdc0c..d5d6d4bc5bdd 100644 --- a/content/canvas/src/nsCanvasRenderingContext2D.cpp +++ b/content/canvas/src/nsCanvasRenderingContext2D.cpp @@ -1813,11 +1813,12 @@ nsCanvasRenderingContext2D::CreatePattern(nsIDOMHTMLElement *image, const nsAString& repeat, nsIDOMCanvasPattern **_retval) { - if (!image) { + nsCOMPtr content = do_QueryInterface(image); + if (!content) { return NS_ERROR_DOM_TYPE_MISMATCH_ERR; } - gfxPattern::GraphicsExtend extend; + gfxPattern::GraphicsExtend extend; if (repeat.IsEmpty() || repeat.EqualsLiteral("repeat")) { extend = gfxPattern::EXTEND_REPEAT; } else if (repeat.EqualsLiteral("repeat-x")) { @@ -1833,7 +1834,6 @@ nsCanvasRenderingContext2D::CreatePattern(nsIDOMHTMLElement *image, return NS_ERROR_DOM_SYNTAX_ERR; } - nsCOMPtr content = do_QueryInterface(image); nsHTMLCanvasElement* canvas = nsHTMLCanvasElement::FromContent(content); if (canvas) { nsIntSize size = canvas->GetSize(); @@ -1845,8 +1845,8 @@ nsCanvasRenderingContext2D::CreatePattern(nsIDOMHTMLElement *image, // The canvas spec says that createPattern should use the first frame // of animated images nsLayoutUtils::SurfaceFromElementResult res = - nsLayoutUtils::SurfaceFromElement(image, nsLayoutUtils::SFE_WANT_FIRST_FRAME | - nsLayoutUtils::SFE_WANT_NEW_SURFACE); + nsLayoutUtils::SurfaceFromElement(content->AsElement(), + nsLayoutUtils::SFE_WANT_FIRST_FRAME | nsLayoutUtils::SFE_WANT_NEW_SURFACE); if (!res.mSurface) return NS_ERROR_NOT_AVAILABLE; @@ -3399,11 +3399,11 @@ nsCanvasRenderingContext2D::DrawImage(nsIDOMElement *imgElt, float a1, if (!EnsureSurface()) return NS_ERROR_FAILURE; - if (!imgElt) { + nsCOMPtr content = do_QueryInterface(imgElt); + if (!content) { return NS_ERROR_DOM_TYPE_MISMATCH_ERR; } - nsCOMPtr content = do_QueryInterface(imgElt); nsHTMLCanvasElement* canvas = nsHTMLCanvasElement::FromContent(content); if (canvas) { nsIntSize size = canvas->GetSize(); @@ -3423,7 +3423,7 @@ nsCanvasRenderingContext2D::DrawImage(nsIDOMElement *imgElt, float a1, // of animated images PRUint32 sfeFlags = nsLayoutUtils::SFE_WANT_FIRST_FRAME; nsLayoutUtils::SurfaceFromElementResult res = - nsLayoutUtils::SurfaceFromElement(imgElt, sfeFlags); + nsLayoutUtils::SurfaceFromElement(content->AsElement(), sfeFlags); if (!res.mSurface) { // Spec says to silently do nothing if the element is still loading. return res.mIsStillLoading ? NS_OK : NS_ERROR_NOT_AVAILABLE; @@ -3433,7 +3433,8 @@ nsCanvasRenderingContext2D::DrawImage(nsIDOMElement *imgElt, float a1, // as a source to work around some Cairo self-copy semantics issues. 
if (res.mSurface == mSurface) { sfeFlags |= nsLayoutUtils::SFE_WANT_NEW_SURFACE; - res = nsLayoutUtils::SurfaceFromElement(imgElt, sfeFlags); + res = nsLayoutUtils::SurfaceFromElement(content->AsElement(), + sfeFlags); if (!res.mSurface) return NS_ERROR_NOT_AVAILABLE; } diff --git a/content/canvas/src/nsCanvasRenderingContext2DAzure.cpp b/content/canvas/src/nsCanvasRenderingContext2DAzure.cpp index 849519f5a5af..a856c6bba89d 100644 --- a/content/canvas/src/nsCanvasRenderingContext2DAzure.cpp +++ b/content/canvas/src/nsCanvasRenderingContext2DAzure.cpp @@ -1889,7 +1889,8 @@ nsCanvasRenderingContext2DAzure::CreatePattern(nsIDOMHTMLElement *image, const nsAString& repeat, nsIDOMCanvasPattern **_retval) { - if (!image) { + nsCOMPtr content = do_QueryInterface(image); + if (!content) { return NS_ERROR_DOM_TYPE_MISMATCH_ERR; } @@ -1908,9 +1909,7 @@ nsCanvasRenderingContext2DAzure::CreatePattern(nsIDOMHTMLElement *image, return NS_ERROR_DOM_SYNTAX_ERR; } - nsCOMPtr content = do_QueryInterface(image); nsHTMLCanvasElement* canvas = nsHTMLCanvasElement::FromContent(content); - if (canvas) { nsIntSize size = canvas->GetSize(); if (size.width == 0 || size.height == 0) { @@ -1939,8 +1938,8 @@ nsCanvasRenderingContext2DAzure::CreatePattern(nsIDOMHTMLElement *image, // The canvas spec says that createPattern should use the first frame // of animated images nsLayoutUtils::SurfaceFromElementResult res = - nsLayoutUtils::SurfaceFromElement(image, nsLayoutUtils::SFE_WANT_FIRST_FRAME | - nsLayoutUtils::SFE_WANT_NEW_SURFACE); + nsLayoutUtils::SurfaceFromElement(content->AsElement(), + nsLayoutUtils::SFE_WANT_FIRST_FRAME | nsLayoutUtils::SFE_WANT_NEW_SURFACE); if (!res.mSurface) { return NS_ERROR_NOT_AVAILABLE; @@ -3570,7 +3569,8 @@ nsCanvasRenderingContext2DAzure::DrawImage(nsIDOMElement *imgElt, float a1, float a6, float a7, float a8, PRUint8 optional_argc) { - if (!imgElt) { + nsCOMPtr content = do_QueryInterface(imgElt); + if (!content) { return NS_ERROR_DOM_TYPE_MISMATCH_ERR; } @@ -3591,7 +3591,6 @@ nsCanvasRenderingContext2DAzure::DrawImage(nsIDOMElement *imgElt, float a1, double sx,sy,sw,sh; double dx,dy,dw,dh; - nsCOMPtr content = do_QueryInterface(imgElt); nsHTMLCanvasElement* canvas = nsHTMLCanvasElement::FromContent(content); if (canvas) { nsIntSize size = canvas->GetSize(); @@ -3643,7 +3642,7 @@ nsCanvasRenderingContext2DAzure::DrawImage(nsIDOMElement *imgElt, float a1, // of animated images PRUint32 sfeFlags = nsLayoutUtils::SFE_WANT_FIRST_FRAME; nsLayoutUtils::SurfaceFromElementResult res = - nsLayoutUtils::SurfaceFromElement(imgElt, sfeFlags); + nsLayoutUtils::SurfaceFromElement(content->AsElement(), sfeFlags); if (!res.mSurface) { // Spec says to silently do nothing if the element is still loading. 
diff --git a/content/html/content/public/nsHTMLVideoElement.h b/content/html/content/public/nsHTMLVideoElement.h index 7891d3850a0c..bd6acb68585e 100644 --- a/content/html/content/public/nsHTMLVideoElement.h +++ b/content/html/content/public/nsHTMLVideoElement.h @@ -48,6 +48,14 @@ public: nsHTMLVideoElement(already_AddRefed aNodeInfo); virtual ~nsHTMLVideoElement(); + static nsHTMLVideoElement* FromContent(nsIContent* aPossibleVideo) + { + if (!aPossibleVideo || !aPossibleVideo->IsHTML(nsGkAtoms::video)) { + return NULL; + } + return static_cast(aPossibleVideo); + } + // nsISupports NS_DECL_ISUPPORTS_INHERITED diff --git a/content/html/content/src/nsHTMLMediaElement.cpp b/content/html/content/src/nsHTMLMediaElement.cpp index 5c159aeda259..f29e59bf6df3 100644 --- a/content/html/content/src/nsHTMLMediaElement.cpp +++ b/content/html/content/src/nsHTMLMediaElement.cpp @@ -1516,6 +1516,9 @@ nsresult nsHTMLMediaElement::SetAttr(PRInt32 aNameSpaceID, nsIAtom* aName, if (aNameSpaceID == kNameSpaceID_None && aName == nsGkAtoms::src) { Load(); } + if (aNameSpaceID == kNameSpaceID_None && aName == nsGkAtoms::muted) { + mMuted = true; + } if (aNotify && aNameSpaceID == kNameSpaceID_None) { if (aName == nsGkAtoms::autoplay) { StopSuspendingAfterFirstFrame(); @@ -2687,18 +2690,17 @@ nsHTMLMediaElement::CopyInnerTo(nsGenericElement* aDest) const dest->mMediaSize = mMediaSize; } else { nsIFrame* frame = GetPrimaryFrame(); - nsCOMPtr elem; + Element* element; if (frame && frame->GetType() == nsGkAtoms::HTMLVideoFrame && static_cast(frame)->ShouldDisplayPoster()) { - elem = do_QueryInterface(static_cast(frame)-> - GetPosterImage()); + nsIContent* content = static_cast(frame)->GetPosterImage(); + element = content ? content->AsElement() : NULL; } else { - elem = do_QueryInterface( - static_cast(const_cast(this))); + element = const_cast(this); } nsLayoutUtils::SurfaceFromElementResult res = - nsLayoutUtils::SurfaceFromElement(elem, + nsLayoutUtils::SurfaceFromElement(element, nsLayoutUtils::SFE_WANT_NEW_SURFACE); dest->mPrintSurface = res.mSurface; dest->mMediaSize = nsIntSize(res.mSize.width, res.mSize.height); diff --git a/content/html/document/src/nsHTMLDocument.cpp b/content/html/document/src/nsHTMLDocument.cpp index 51069d9ed3d7..e9df8264ec29 100644 --- a/content/html/document/src/nsHTMLDocument.cpp +++ b/content/html/document/src/nsHTMLDocument.cpp @@ -1745,7 +1745,8 @@ nsHTMLDocument::Open(const nsAString& aContentTypeOrUrl, nsCOMPtr newScope(do_QueryReferent(mScopeObject)); if (oldScope && newScope != oldScope) { - nsContentUtils::ReparentContentWrappersInScope(cx, oldScope, newScope); + rv = nsContentUtils::ReparentContentWrappersInScope(cx, oldScope, newScope); + NS_ENSURE_SUCCESS(rv, rv); } } @@ -1954,6 +1955,8 @@ nsHTMLDocument::WriteCommon(JSContext *cx, if (NS_FAILED(rv) || !mParser) { return rv; } + NS_ABORT_IF_FALSE(!JS_IsExceptionPending(cx), + "Open() succeeded but JS exception is pending"); } static NS_NAMED_LITERAL_STRING(new_line, "\n"); diff --git a/content/xul/document/src/nsXULDocument.cpp b/content/xul/document/src/nsXULDocument.cpp index 4175a4bed355..a05dce4f6b17 100644 --- a/content/xul/document/src/nsXULDocument.cpp +++ b/content/xul/document/src/nsXULDocument.cpp @@ -2364,23 +2364,6 @@ nsXULDocument::ContextStack::SetTopIndex(PRInt32 aIndex) } -bool -nsXULDocument::ContextStack::IsInsideXULTemplate() -{ - if (mDepth) { - for (nsIContent* element = mTop->mElement; element; - element = element->GetParent()) { - - if (element->NodeInfo()->Equals(nsGkAtoms::_template, - 
kNameSpaceID_XUL)) { - return true; - } - } - } - return false; -} - - //---------------------------------------------------------------------- // // Content model walking routines diff --git a/content/xul/document/src/nsXULDocument.h b/content/xul/document/src/nsXULDocument.h index 04a26920ae19..bec10543a2f6 100644 --- a/content/xul/document/src/nsXULDocument.h +++ b/content/xul/document/src/nsXULDocument.h @@ -357,8 +357,6 @@ protected: nsresult Peek(nsXULPrototypeElement** aPrototype, nsIContent** aElement, PRInt32* aIndex); nsresult SetTopIndex(PRInt32 aIndex); - - bool IsInsideXULTemplate(); }; friend class ContextStack; diff --git a/content/xul/templates/src/nsRuleNetwork.cpp b/content/xul/templates/src/nsRuleNetwork.cpp index c669afd6e52f..d62a22603660 100644 --- a/content/xul/templates/src/nsRuleNetwork.cpp +++ b/content/xul/templates/src/nsRuleNetwork.cpp @@ -454,13 +454,6 @@ TestNode::Constrain(InstantiationSet& aInstantiations) } -bool -TestNode::HasAncestor(const ReteNode* aNode) const -{ - return aNode == this || (mParent && mParent->HasAncestor(aNode)); -} - - //---------------------------------------------------------------------- ReteNodeSet::ReteNodeSet() diff --git a/content/xul/templates/src/nsRuleNetwork.h b/content/xul/templates/src/nsRuleNetwork.h index dfe8cf4470fd..7836a44d71f7 100644 --- a/content/xul/templates/src/nsRuleNetwork.h +++ b/content/xul/templates/src/nsRuleNetwork.h @@ -878,14 +878,6 @@ public: bool* aCantHandleYet) const = 0; //XXX probably better named "ApplyConstraints" or "Discrminiate" or something - /** - * Determine if this node has another node as its direct ancestor. - * @param aNode the node to look for. - * @return true if aNode is a direct ancestor of this node, false - * otherwise. - */ - bool HasAncestor(const ReteNode* aNode) const; - /** * Add another node as a child of this node. * @param aNode the node to add. 
diff --git a/dom/base/nsDOMWindowUtils.cpp b/dom/base/nsDOMWindowUtils.cpp index 5ae971245d3c..802155b7bc97 100644 --- a/dom/base/nsDOMWindowUtils.cpp +++ b/dom/base/nsDOMWindowUtils.cpp @@ -869,10 +869,18 @@ nsDOMWindowUtils::NodesFromRect(float aX, float aY, } static already_AddRefed -CanvasToImageSurface(nsIDOMHTMLCanvasElement *canvas) +CanvasToImageSurface(nsIDOMHTMLCanvasElement* aCanvas) { + nsCOMPtr node = do_QueryInterface(aCanvas); + if (!node) { + return nsnull; + } + + NS_ABORT_IF_FALSE(node->IsElement(), + "An nsINode that implements nsIDOMHTMLCanvasElement should " + "be an element."); nsLayoutUtils::SurfaceFromElementResult result = - nsLayoutUtils::SurfaceFromElement(canvas, + nsLayoutUtils::SurfaceFromElement(node->AsElement(), nsLayoutUtils::SFE_WANT_IMAGE_SURFACE); return static_cast(result.mSurface.forget().get()); } @@ -1442,44 +1450,20 @@ nsDOMWindowUtils::SendContentCommandEvent(const nsAString& aType, } NS_IMETHODIMP -nsDOMWindowUtils::GetClassName(char **aName) +nsDOMWindowUtils::GetClassName(const JS::Value& aObject, JSContext* aCx, char** aName) { if (!nsContentUtils::IsCallerTrustedForRead()) { return NS_ERROR_DOM_SECURITY_ERR; } - // get the xpconnect native call context - nsAXPCNativeCallContext *cc = nsnull; - nsContentUtils::XPConnect()->GetCurrentNativeCallContext(&cc); - if(!cc) - return NS_ERROR_FAILURE; - - // Get JSContext of current call - JSContext* cx; - nsresult rv = cc->GetJSContext(&cx); - if(NS_FAILED(rv) || !cx) - return NS_ERROR_FAILURE; - - // get argc and argv and verify arg count - PRUint32 argc; - rv = cc->GetArgc(&argc); - if(NS_FAILED(rv)) - return NS_ERROR_FAILURE; - - if(argc < 1) - return NS_ERROR_XPC_NOT_ENOUGH_ARGS; - - jsval* argv; - rv = cc->GetArgvPtr(&argv); - if(NS_FAILED(rv) || !argv) - return NS_ERROR_FAILURE; - // Our argument must be a non-null object. - if(JSVAL_IS_PRIMITIVE(argv[0])) + if (JSVAL_IS_PRIMITIVE(aObject)) { return NS_ERROR_XPC_BAD_CONVERT_JS; + } - *aName = NS_strdup(JS_GET_CLASS(cx, JSVAL_TO_OBJECT(argv[0]))->name); - return *aName ? NS_OK : NS_ERROR_OUT_OF_MEMORY; + *aName = NS_strdup(JS_GET_CLASS(aCx, JSVAL_TO_OBJECT(aObject))->name); + NS_ABORT_IF_FALSE(*aName, "NS_strdup should be infallible."); + return NS_OK; } NS_IMETHODIMP diff --git a/dom/indexedDB/AsyncConnectionHelper.cpp b/dom/indexedDB/AsyncConnectionHelper.cpp index 44eae0a7805e..7790aaabc361 100644 --- a/dom/indexedDB/AsyncConnectionHelper.cpp +++ b/dom/indexedDB/AsyncConnectionHelper.cpp @@ -279,7 +279,7 @@ AsyncConnectionHelper::Run() if (NS_SUCCEEDED(rv)) { bool hasSavepoint = false; if (mDatabase) { - IndexedDatabaseManager::SetCurrentDatabase(mDatabase); + IndexedDatabaseManager::SetCurrentWindow(mDatabase->Owner()); // Make the first savepoint. if (mTransaction) { @@ -292,7 +292,7 @@ AsyncConnectionHelper::Run() mResultCode = DoDatabaseWork(connection); if (mDatabase) { - IndexedDatabaseManager::SetCurrentDatabase(nsnull); + IndexedDatabaseManager::SetCurrentWindow(nsnull); // Release or roll back the savepoint depending on the error code. 
if (hasSavepoint) { diff --git a/dom/indexedDB/CheckQuotaHelper.cpp b/dom/indexedDB/CheckQuotaHelper.cpp index cbab492c7b12..a4f3ee5323f8 100644 --- a/dom/indexedDB/CheckQuotaHelper.cpp +++ b/dom/indexedDB/CheckQuotaHelper.cpp @@ -98,11 +98,9 @@ GetQuotaPermissions(const nsACString& aASCIIOrigin, } // anonymous namespace -CheckQuotaHelper::CheckQuotaHelper(IDBDatabase* aDatabase, +CheckQuotaHelper::CheckQuotaHelper(nsPIDOMWindow* aWindow, mozilla::Mutex& aMutex) -: mWindow(aDatabase->Owner()), - mWindowSerial(mWindow->GetSerial()), - mOrigin(aDatabase->Origin()), +: mWindow(aWindow), mMutex(aMutex), mCondVar(mMutex, "CheckQuotaHelper::mCondVar"), mPromptResult(0), @@ -175,51 +173,59 @@ CheckQuotaHelper::Run() { NS_ASSERTION(NS_IsMainThread(), "Wrong thread!"); - if (!mHasPrompted) { - mPromptResult = GetQuotaPermissions(mOrigin, mWindow); + nsresult rv = NS_OK; + + if (mASCIIOrigin.IsEmpty()) { + rv = IndexedDatabaseManager::GetASCIIOriginFromWindow(mWindow, + mASCIIOrigin); } - nsresult rv; - if (mHasPrompted) { - // Add permissions to the database, but only if we are in the parent - // process (if we are in the child process, we have already - // set the permission when the prompt was shown in the parent, as - // we cannot set the permission from the child). - if (mPromptResult != nsIPermissionManager::UNKNOWN_ACTION && - XRE_GetProcessType() == GeckoProcessType_Default) { - nsCOMPtr uri; - rv = NS_NewURI(getter_AddRefs(uri), mOrigin); - NS_ENSURE_SUCCESS(rv, rv); - - nsCOMPtr permissionManager = - do_GetService(NS_PERMISSIONMANAGER_CONTRACTID); - NS_ENSURE_STATE(permissionManager); - - rv = permissionManager->Add(uri, PERMISSION_INDEXEDDB_UNLIMITED, - mPromptResult, - nsIPermissionManager::EXPIRE_NEVER, 0); - NS_ENSURE_SUCCESS(rv, rv); + if (NS_SUCCEEDED(rv)) { + if (!mHasPrompted) { + mPromptResult = GetQuotaPermissions(mASCIIOrigin, mWindow); } - } - else if (mPromptResult == nsIPermissionManager::UNKNOWN_ACTION) { - PRUint32 quota = IndexedDatabaseManager::GetIndexedDBQuotaMB(); - nsString quotaString; - quotaString.AppendInt(quota); + if (mHasPrompted) { + // Add permissions to the database, but only if we are in the parent + // process (if we are in the child process, we have already + // set the permission when the prompt was shown in the parent, as + // we cannot set the permission from the child). + if (mPromptResult != nsIPermissionManager::UNKNOWN_ACTION && + XRE_GetProcessType() == GeckoProcessType_Default) { + nsCOMPtr uri; + rv = NS_NewURI(getter_AddRefs(uri), mASCIIOrigin); + NS_ENSURE_SUCCESS(rv, rv); + + nsCOMPtr permissionManager = + do_GetService(NS_PERMISSIONMANAGER_CONTRACTID); + NS_ENSURE_STATE(permissionManager); + + rv = permissionManager->Add(uri, PERMISSION_INDEXEDDB_UNLIMITED, + mPromptResult, + nsIPermissionManager::EXPIRE_NEVER, 0); + NS_ENSURE_SUCCESS(rv, rv); + } + } + else if (mPromptResult == nsIPermissionManager::UNKNOWN_ACTION) { + PRUint32 quota = IndexedDatabaseManager::GetIndexedDBQuotaMB(); - nsCOMPtr obs = GetObserverService(); - NS_ENSURE_STATE(obs); + nsString quotaString; + quotaString.AppendInt(quota); - // We have to watch to make sure that the window doesn't go away without - // responding to us. Otherwise our database threads will hang. 
- rv = obs->AddObserver(this, DOM_WINDOW_DESTROYED_TOPIC, false); - NS_ENSURE_SUCCESS(rv, rv); + nsCOMPtr obs = GetObserverService(); + NS_ENSURE_STATE(obs); - rv = obs->NotifyObservers(static_cast(this), - TOPIC_QUOTA_PROMPT, quotaString.get()); - NS_ENSURE_SUCCESS(rv, rv); + // We have to watch to make sure that the window doesn't go away without + // responding to us. Otherwise our database threads will hang. + rv = obs->AddObserver(this, DOM_WINDOW_DESTROYED_TOPIC, false); + NS_ENSURE_SUCCESS(rv, rv); - return NS_OK; + rv = obs->NotifyObservers(static_cast(this), + TOPIC_QUOTA_PROMPT, quotaString.get()); + NS_ENSURE_SUCCESS(rv, rv); + + return NS_OK; + } } MutexAutoLock lock(mMutex); diff --git a/dom/indexedDB/CheckQuotaHelper.h b/dom/indexedDB/CheckQuotaHelper.h index d88a1092cfb3..11df9637fc5c 100644 --- a/dom/indexedDB/CheckQuotaHelper.h +++ b/dom/indexedDB/CheckQuotaHelper.h @@ -65,23 +65,17 @@ public: NS_DECL_NSIINTERFACEREQUESTOR NS_DECL_NSIOBSERVER - CheckQuotaHelper(IDBDatabase* aDatabase, + CheckQuotaHelper(nsPIDOMWindow* aWindow, mozilla::Mutex& aMutex); bool PromptAndReturnQuotaIsDisabled(); void Cancel(); - PRUint32 WindowSerial() - { - return mWindowSerial; - } - private: nsPIDOMWindow* mWindow; - PRUint32 mWindowSerial; - nsCString mOrigin; + nsCString mASCIIOrigin; mozilla::Mutex& mMutex; mozilla::CondVar mCondVar; PRUint32 mPromptResult; diff --git a/dom/indexedDB/IDBDatabase.cpp b/dom/indexedDB/IDBDatabase.cpp index 8c56fbf2e20d..11a4fdbe5a1e 100644 --- a/dom/indexedDB/IDBDatabase.cpp +++ b/dom/indexedDB/IDBDatabase.cpp @@ -64,12 +64,6 @@ USING_INDEXEDDB_NAMESPACE namespace { -PRUint32 gDatabaseInstanceCount = 0; -mozilla::Mutex* gPromptHelpersMutex = nsnull; - -// Protected by gPromptHelpersMutex. -nsTArray >* gPromptHelpers = nsnull; - class CreateObjectStoreHelper : public AsyncConnectionHelper { public: @@ -195,11 +189,6 @@ IDBDatabase::IDBDatabase() mRunningVersionChange(false) { NS_ASSERTION(NS_IsMainThread(), "Wrong thread!"); - - if (!gDatabaseInstanceCount++) { - NS_ASSERTION(!gPromptHelpersMutex, "Should be null!"); - gPromptHelpersMutex = new mozilla::Mutex("IDBDatabase gPromptHelpersMutex"); - } } IDBDatabase::~IDBDatabase() @@ -218,86 +207,20 @@ IDBDatabase::~IDBDatabase() if (mListenerManager) { mListenerManager->Disconnect(); } - - if (!--gDatabaseInstanceCount) { - NS_ASSERTION(gPromptHelpersMutex, "Should not be null!"); - - delete gPromptHelpers; - gPromptHelpers = nsnull; - - delete gPromptHelpersMutex; - gPromptHelpersMutex = nsnull; - } -} - -bool -IDBDatabase::IsQuotaDisabled() -{ - NS_ASSERTION(!NS_IsMainThread(), "Wrong thread!"); - NS_ASSERTION(gPromptHelpersMutex, "This should never be null!"); - - MutexAutoLock lock(*gPromptHelpersMutex); - - if (!gPromptHelpers) { - gPromptHelpers = new nsAutoTArray, 10>(); - } - - CheckQuotaHelper* foundHelper = nsnull; - - PRUint32 count = gPromptHelpers->Length(); - for (PRUint32 index = 0; index < count; index++) { - nsRefPtr& helper = gPromptHelpers->ElementAt(index); - if (helper->WindowSerial() == Owner()->GetSerial()) { - foundHelper = helper; - break; - } - } - - if (!foundHelper) { - nsRefPtr* newHelper = gPromptHelpers->AppendElement(); - if (!newHelper) { - NS_WARNING("Out of memory!"); - return false; - } - *newHelper = new CheckQuotaHelper(this, *gPromptHelpersMutex); - foundHelper = *newHelper; - - { - // Unlock before calling out to XPCOM. 
- MutexAutoUnlock unlock(*gPromptHelpersMutex); - - nsresult rv = NS_DispatchToMainThread(foundHelper, NS_DISPATCH_NORMAL); - NS_ENSURE_SUCCESS(rv, false); - } - } - - return foundHelper->PromptAndReturnQuotaIsDisabled(); } void IDBDatabase::Invalidate() { NS_ASSERTION(NS_IsMainThread(), "Wrong thread!"); - NS_ASSERTION(gPromptHelpersMutex, "This should never be null!"); // Make sure we're closed too. Close(); - // Cancel any quota prompts that are currently being displayed. - { - MutexAutoLock lock(*gPromptHelpersMutex); - - if (gPromptHelpers) { - PRUint32 count = gPromptHelpers->Length(); - for (PRUint32 index = 0; index < count; index++) { - nsRefPtr& helper = gPromptHelpers->ElementAt(index); - if (helper->WindowSerial() == Owner()->GetSerial()) { - helper->Cancel(); - break; - } - } - } - } + // When the IndexedDatabaseManager needs to invalidate databases, all it has + // is an origin, so we call back into the manager to cancel any prompts for + // our owner. + IndexedDatabaseManager::CancelPromptsForWindow(Owner()); mInvalidated = true; } diff --git a/dom/indexedDB/IDBDatabase.h b/dom/indexedDB/IDBDatabase.h index 201d6e00ef42..6062e4a52ac2 100644 --- a/dom/indexedDB/IDBDatabase.h +++ b/dom/indexedDB/IDBDatabase.h @@ -122,8 +122,6 @@ public: return doc.forget(); } - bool IsQuotaDisabled(); - nsCString& Origin() { return mASCIIOrigin; diff --git a/dom/indexedDB/IDBFactory.cpp b/dom/indexedDB/IDBFactory.cpp index 78cfdec55376..1af035ef32a5 100644 --- a/dom/indexedDB/IDBFactory.cpp +++ b/dom/indexedDB/IDBFactory.cpp @@ -403,24 +403,10 @@ IDBFactory::OpenCommon(const nsAString& aName, nsIScriptContext* context = sgo->GetContext(); NS_ENSURE_TRUE(context, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR); - nsCOMPtr principal; - nsresult rv = nsContentUtils::GetSecurityManager()-> - GetSubjectPrincipal(getter_AddRefs(principal)); - NS_ENSURE_SUCCESS(rv, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR); - nsCString origin; - if (nsContentUtils::IsSystemPrincipal(principal)) { - origin.AssignLiteral("chrome"); - } - else { - rv = nsContentUtils::GetASCIIOrigin(principal, origin); - NS_ENSURE_SUCCESS(rv, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR); - - if (origin.EqualsLiteral("null")) { - NS_WARNING("IndexedDB databases not allowed for this principal!"); - return NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR; - } - } + nsresult rv = + IndexedDatabaseManager::GetASCIIOriginFromWindow(window, origin); + NS_ENSURE_SUCCESS(rv, rv); nsRefPtr request = IDBOpenDBRequest::Create(context, window); diff --git a/dom/indexedDB/IDBTransaction.cpp b/dom/indexedDB/IDBTransaction.cpp index 6079d62a41ff..6d4e08c11d32 100644 --- a/dom/indexedDB/IDBTransaction.cpp +++ b/dom/indexedDB/IDBTransaction.cpp @@ -887,7 +887,7 @@ CommitHelper::Run() } if (mConnection) { - IndexedDatabaseManager::SetCurrentDatabase(database); + IndexedDatabaseManager::SetCurrentWindow(database->Owner()); if (!mAborted) { NS_NAMED_LITERAL_CSTRING(release, "COMMIT TRANSACTION"); @@ -923,7 +923,7 @@ CommitHelper::Run() mConnection->Close(); mConnection = nsnull; - IndexedDatabaseManager::SetCurrentDatabase(nsnull); + IndexedDatabaseManager::SetCurrentWindow(nsnull); } return NS_OK; diff --git a/dom/indexedDB/IndexedDatabaseManager.cpp b/dom/indexedDB/IndexedDatabaseManager.cpp index 9b0ae25c7dec..264079af019f 100644 --- a/dom/indexedDB/IndexedDatabaseManager.cpp +++ b/dom/indexedDB/IndexedDatabaseManager.cpp @@ -41,6 +41,8 @@ #include "nsIFile.h" #include "nsIObserverService.h" +#include "nsIScriptObjectPrincipal.h" +#include "nsIScriptSecurityManager.h" #include "nsISHEntry.h" 
#include "nsISimpleEnumerator.h" #include "nsITimer.h" @@ -54,6 +56,7 @@ #include "nsXPCOMPrivate.h" #include "AsyncConnectionHelper.h" +#include "CheckQuotaHelper.h" #include "IDBDatabase.h" #include "IDBEvents.h" #include "IDBFactory.h" @@ -75,7 +78,7 @@ #define PREF_INDEXEDDB_QUOTA "dom.indexedDB.warningQuota" // A bad TLS index number. -#define BAD_TLS_INDEX (PRUintn)-1 +#define BAD_TLS_INDEX (PRUintn)-1 USING_INDEXEDDB_NAMESPACE using namespace mozilla::services; @@ -88,8 +91,6 @@ PRInt32 gShutdown = 0; // Does not hold a reference. IndexedDatabaseManager* gInstance = nsnull; -PRUintn gCurrentDatabaseIndex = BAD_TLS_INDEX; - PRInt32 gIndexedDBQuotaMB = DEFAULT_QUOTA_MB; class QuotaCallback : public mozIStorageQuotaCallback @@ -104,13 +105,7 @@ public: nsISupports* aUserData, PRInt64* _retval) { - NS_ASSERTION(gCurrentDatabaseIndex != BAD_TLS_INDEX, - "This should be impossible!"); - - IDBDatabase* database = - static_cast(PR_GetThreadPrivate(gCurrentDatabaseIndex)); - - if (database && database->IsQuotaDisabled()) { + if (IndexedDatabaseManager::QuotaIsLifted()) { *_retval = 0; return NS_OK; } @@ -146,6 +141,8 @@ EnumerateToTArray(const nsACString& aKey, } // anonymous namespace IndexedDatabaseManager::IndexedDatabaseManager() +: mCurrentWindowIndex(BAD_TLS_INDEX), + mQuotaHelperMutex("IndexedDatabaseManager.mQuotaHelperMutex") { NS_ASSERTION(NS_IsMainThread(), "Wrong thread!"); NS_ASSERTION(!gInstance, "More than one instance!"); @@ -172,30 +169,31 @@ IndexedDatabaseManager::GetOrCreate() nsRefPtr instance(gInstance); if (!instance) { - // We need a thread-local to hold our current database. - if (gCurrentDatabaseIndex == BAD_TLS_INDEX) { - if (PR_NewThreadPrivateIndex(&gCurrentDatabaseIndex, nsnull) != - PR_SUCCESS) { - NS_ERROR("PR_NewThreadPrivateIndex failed!"); - gCurrentDatabaseIndex = BAD_TLS_INDEX; - return nsnull; - } - - if (NS_FAILED(Preferences::AddIntVarCache(&gIndexedDBQuotaMB, - PREF_INDEXEDDB_QUOTA, - DEFAULT_QUOTA_MB))) { - NS_WARNING("Unable to respond to quota pref changes!"); - gIndexedDBQuotaMB = DEFAULT_QUOTA_MB; - } + if (NS_FAILED(Preferences::AddIntVarCache(&gIndexedDBQuotaMB, + PREF_INDEXEDDB_QUOTA, + DEFAULT_QUOTA_MB))) { + NS_WARNING("Unable to respond to quota pref changes!"); + gIndexedDBQuotaMB = DEFAULT_QUOTA_MB; } instance = new IndexedDatabaseManager(); - if (!instance->mLiveDatabases.Init()) { + if (!instance->mLiveDatabases.Init() || + !instance->mQuotaHelperHash.Init()) { NS_WARNING("Out of memory!"); return nsnull; } + // We need a thread-local to hold the current window. + NS_ASSERTION(instance->mCurrentWindowIndex == BAD_TLS_INDEX, "Huh?"); + + if (PR_NewThreadPrivateIndex(&instance->mCurrentWindowIndex, nsnull) != + PR_SUCCESS) { + NS_ERROR("PR_NewThreadPrivateIndex failed, IndexedDB disabled"); + instance->mCurrentWindowIndex = BAD_TLS_INDEX; + return nsnull; + } + // Make a timer here to avoid potential failures later. We don't actually // initialize the timer until shutdown. 
instance->mShutdownTimer = do_CreateInstance(NS_TIMER_CONTRACTID); @@ -548,30 +546,23 @@ IndexedDatabaseManager::OnDatabaseClosed(IDBDatabase* aDatabase) } } -// static -bool -IndexedDatabaseManager::SetCurrentDatabase(IDBDatabase* aDatabase) +void +IndexedDatabaseManager::SetCurrentWindowInternal(nsPIDOMWindow* aWindow) { - NS_ASSERTION(gCurrentDatabaseIndex != BAD_TLS_INDEX, - "This should have been set already!"); - + if (aWindow) { #ifdef DEBUG - if (aDatabase) { - NS_ASSERTION(!PR_GetThreadPrivate(gCurrentDatabaseIndex), - "Someone forgot to unset gCurrentDatabaseIndex!"); + NS_ASSERTION(!PR_GetThreadPrivate(mCurrentWindowIndex), + "Somebody forgot to clear the current window!"); +#endif + PR_SetThreadPrivate(mCurrentWindowIndex, aWindow); } else { - NS_ASSERTION(PR_GetThreadPrivate(gCurrentDatabaseIndex), - "Someone forgot to set gCurrentDatabaseIndex!"); - } +#ifdef DEBUG + NS_ASSERTION(PR_GetThreadPrivate(mCurrentWindowIndex), + "Somebody forgot to clear the current window!"); #endif - - if (PR_SetThreadPrivate(gCurrentDatabaseIndex, aDatabase) != PR_SUCCESS) { - NS_WARNING("Failed to set gCurrentDatabaseIndex!"); - return false; + PR_SetThreadPrivate(mCurrentWindowIndex, nsnull); } - - return true; } // static @@ -662,6 +653,102 @@ IndexedDatabaseManager::EnsureQuotaManagementForDirectory(nsIFile* aDirectory) return rv; } +bool +IndexedDatabaseManager::QuotaIsLiftedInternal() +{ + nsPIDOMWindow* window = nsnull; + nsRefPtr helper = nsnull; + bool createdHelper = false; + + window = + static_cast(PR_GetThreadPrivate(mCurrentWindowIndex)); + + // Once IDB is supported outside of Windows this should become an early + // return true. + NS_ASSERTION(window, "Why don't we have a Window here?"); + + // Hold the lock from here on. + MutexAutoLock autoLock(mQuotaHelperMutex); + + mQuotaHelperHash.Get(window, getter_AddRefs(helper)); + + if (!helper) { + helper = new CheckQuotaHelper(window, mQuotaHelperMutex); + createdHelper = true; + + bool result = mQuotaHelperHash.Put(window, helper); + NS_ENSURE_TRUE(result, result); + + // Unlock while calling out to XPCOM + { + MutexAutoUnlock autoUnlock(mQuotaHelperMutex); + + nsresult rv = NS_DispatchToMainThread(helper); + NS_ENSURE_SUCCESS(rv, false); + } + + // Relocked. If any other threads hit the quota limit on the same Window, + // they are using the helper we created here and are now blocking in + // PromptAndReturnQuotaDisabled. + } + + bool result = helper->PromptAndReturnQuotaIsDisabled(); + + // If this thread created the helper and added it to the hash, this thread + // must remove it. 
+ if (createdHelper) { + mQuotaHelperHash.Remove(window); + } + + return result; +} + +void +IndexedDatabaseManager::CancelPromptsForWindowInternal(nsPIDOMWindow* aWindow) +{ + NS_ASSERTION(NS_IsMainThread(), "Wrong thread!"); + + nsRefPtr helper; + + MutexAutoLock autoLock(mQuotaHelperMutex); + + mQuotaHelperHash.Get(aWindow, getter_AddRefs(helper)); + + if (helper) { + helper->Cancel(); + } +} + +// static +nsresult +IndexedDatabaseManager::GetASCIIOriginFromWindow(nsPIDOMWindow* aWindow, + nsCString& aASCIIOrigin) +{ + NS_ASSERTION(NS_IsMainThread(), + "We're about to touch a window off the main thread!"); + + nsCOMPtr sop = do_QueryInterface(aWindow); + NS_ENSURE_TRUE(sop, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR); + + nsCOMPtr principal = sop->GetPrincipal(); + NS_ENSURE_TRUE(principal, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR); + + if (nsContentUtils::IsSystemPrincipal(principal)) { + aASCIIOrigin.AssignLiteral("chrome"); + } + else { + nsresult rv = nsContentUtils::GetASCIIOrigin(principal, aASCIIOrigin); + NS_ENSURE_SUCCESS(rv, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR); + + if (aASCIIOrigin.EqualsLiteral("null")) { + NS_WARNING("IndexedDB databases not allowed for this principal!"); + return NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR; + } + } + + return NS_OK; +} + // static nsresult IndexedDatabaseManager::DispatchHelper(AsyncConnectionHelper* aHelper) diff --git a/dom/indexedDB/IndexedDatabaseManager.h b/dom/indexedDB/IndexedDatabaseManager.h index 3742ead1b882..482828245291 100644 --- a/dom/indexedDB/IndexedDatabaseManager.h +++ b/dom/indexedDB/IndexedDatabaseManager.h @@ -44,6 +44,8 @@ #include "mozilla/dom/indexedDB/IDBDatabase.h" #include "mozilla/dom/indexedDB/IDBRequest.h" +#include "mozilla/Mutex.h" + #include "nsIIndexedDatabaseManager.h" #include "nsIObserver.h" #include "nsIRunnable.h" @@ -51,6 +53,7 @@ #include "nsIURI.h" #include "nsClassHashtable.h" +#include "nsRefPtrHashtable.h" #include "nsHashKeys.h" #define INDEXEDDB_MANAGER_CONTRACTID "@mozilla.org/dom/indexeddb/manager;1" @@ -62,6 +65,8 @@ BEGIN_INDEXEDDB_NAMESPACE class AsyncConnectionHelper; +class CheckQuotaHelper; + class IndexedDatabaseManager : public nsIIndexedDatabaseManager, public nsIObserver { @@ -129,14 +134,45 @@ public: // Used to check if there are running transactions in a given window. bool HasOpenTransactions(nsPIDOMWindow* aWindow); - static bool - SetCurrentDatabase(IDBDatabase* aDatabase); + // Set the Window that the current thread is doing operations for. + // The caller is responsible for ensuring that aWindow is held alive. + static inline void + SetCurrentWindow(nsPIDOMWindow* aWindow) + { + IndexedDatabaseManager* mgr = Get(); + NS_ASSERTION(mgr, "Must have a manager here!"); + + return mgr->SetCurrentWindowInternal(aWindow); + } static PRUint32 GetIndexedDBQuotaMB(); nsresult EnsureQuotaManagementForDirectory(nsIFile* aDirectory); + // Determine if the quota is lifted for the Window the current thread is + // using. 
+ static inline bool + QuotaIsLifted() + { + IndexedDatabaseManager* mgr = Get(); + NS_ASSERTION(mgr, "Must have a manager here!"); + + return mgr->QuotaIsLiftedInternal(); + } + + static inline void + CancelPromptsForWindow(nsPIDOMWindow* aWindow) + { + IndexedDatabaseManager* mgr = Get(); + NS_ASSERTION(mgr, "Must have a manager here!"); + + mgr->CancelPromptsForWindowInternal(aWindow); + } + + static nsresult + GetASCIIOriginFromWindow(nsPIDOMWindow* aWindow, nsCString& aASCIIOrigin); + private: IndexedDatabaseManager(); ~IndexedDatabaseManager(); @@ -147,6 +183,10 @@ private: WaitingOnDatabasesCallback aCallback, void* aClosure); + void SetCurrentWindowInternal(nsPIDOMWindow* aWindow); + bool QuotaIsLiftedInternal(); + void CancelPromptsForWindowInternal(nsPIDOMWindow* aWindow); + // Called when a database is created. bool RegisterDatabase(IDBDatabase* aDatabase); @@ -267,6 +307,15 @@ private: // Maintains a list of live databases per origin. nsClassHashtable > mLiveDatabases; + // TLS storage index for the current thread's window + PRUintn mCurrentWindowIndex; + + // Lock protecting mQuotaHelperHash + mozilla::Mutex mQuotaHelperMutex; + + // A map of Windows to the corresponding quota helper. + nsRefPtrHashtable, CheckQuotaHelper> mQuotaHelperHash; + // Maintains a list of origins that we're currently enumerating to gather // usage statistics. nsAutoTArray, 1> mUsageRunnables; @@ -290,6 +339,21 @@ private: nsTArray mTrackedQuotaPaths; }; +class AutoEnterWindow +{ +public: + AutoEnterWindow(nsPIDOMWindow* aWindow) + { + NS_ASSERTION(aWindow, "This should never be null!"); + IndexedDatabaseManager::SetCurrentWindow(aWindow); + } + + ~AutoEnterWindow() + { + IndexedDatabaseManager::SetCurrentWindow(nsnull); + } +}; + END_INDEXEDDB_NAMESPACE #endif /* mozilla_dom_indexeddb_indexeddatabasemanager_h__ */ diff --git a/dom/indexedDB/OpenDatabaseHelper.cpp b/dom/indexedDB/OpenDatabaseHelper.cpp index 6acef2285b10..d796efdae119 100644 --- a/dom/indexedDB/OpenDatabaseHelper.cpp +++ b/dom/indexedDB/OpenDatabaseHelper.cpp @@ -1090,6 +1090,14 @@ OpenDatabaseHelper::DoDatabaseWork() return NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR; } + NS_ASSERTION(mOpenDBRequest, "This should never be null!"); + + // Once we support IDB outside of Windows this assertion will no longer hold. + nsPIDOMWindow* window = mOpenDBRequest->Owner(); + NS_ASSERTION(window, "This should never be null"); + + AutoEnterWindow autoWindow(window); + nsCOMPtr dbFile; nsresult rv = GetDatabaseFile(mASCIIOrigin, mName, getter_AddRefs(dbFile)); NS_ENSURE_SUCCESS(rv, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR); diff --git a/dom/indexedDB/test/Makefile.in b/dom/indexedDB/test/Makefile.in index 3e5c0a244952..55676d0bc8f3 100644 --- a/dom/indexedDB/test/Makefile.in +++ b/dom/indexedDB/test/Makefile.in @@ -129,6 +129,8 @@ BROWSER_TEST_FILES = \ browser_quotaPrompt.html \ browser_quotaPromptAllow.js \ browser_quotaPromptDeny.js \ + browser_quotaPromptDatabases.html \ + browser_quotaPromptDatabases.js \ head.js \ $(NULL) diff --git a/dom/indexedDB/test/browser_quotaPromptAllow.js b/dom/indexedDB/test/browser_quotaPromptAllow.js index e9d317fe8847..400c35fcd23f 100644 --- a/dom/indexedDB/test/browser_quotaPromptAllow.js +++ b/dom/indexedDB/test/browser_quotaPromptAllow.js @@ -4,7 +4,7 @@ */ // Make sure this is a unique origin or the tests will randomly fail! 
-const testPageURL = "http://test1.example.org/browser/" + +const testPageURL = "http://bug704464-1.example.com/browser/" + "dom/indexedDB/test/browser_quotaPrompt.html"; const notificationID = "indexedDB-quota-prompt"; diff --git a/dom/indexedDB/test/browser_quotaPromptDatabases.html b/dom/indexedDB/test/browser_quotaPromptDatabases.html new file mode 100644 index 000000000000..352dac5ed469 --- /dev/null +++ b/dom/indexedDB/test/browser_quotaPromptDatabases.html @@ -0,0 +1,55 @@ + + + + Indexed Database Test + + + + + + + + + + diff --git a/dom/indexedDB/test/browser_quotaPromptDatabases.js b/dom/indexedDB/test/browser_quotaPromptDatabases.js new file mode 100644 index 000000000000..f11d0fafcdd2 --- /dev/null +++ b/dom/indexedDB/test/browser_quotaPromptDatabases.js @@ -0,0 +1,76 @@ +/** + * Any copyright is dedicated to the Public Domain. + * http://creativecommons.org/publicdomain/zero/1.0/ + */ + +// Make sure this is a unique origin or the tests will randomly fail! +const testPageURL = "http://bug704464-3.example.com/browser/" + + "dom/indexedDB/test/browser_quotaPromptDatabases.html"; +const notificationID = "indexedDB-quota-prompt"; + +function test() +{ + waitForExplicitFinish(); + requestLongerTimeout(10); + setPermission(testPageURL, "indexedDB"); + removePermission(testPageURL, "indexedDB-unlimited"); + Services.prefs.setIntPref("dom.indexedDB.warningQuota", 2); + executeSoon(test1); +} + +let addMoreTest1Count = 0; + +function test1() +{ + gBrowser.selectedTab = gBrowser.addTab(); + + gBrowser.selectedBrowser.addEventListener("load", function () { + gBrowser.selectedBrowser.removeEventListener("load", arguments.callee, true); + + let seenPopupCount; + + setFinishedCallback(function(result) { + is(result, "ready", "Got 'ready' result"); + + setFinishedCallback(function(result) { + is(result, "complete", "Got 'complete' result"); + + if (addMoreTest1Count >= seenPopupCount + 5) { + setFinishedCallback(function(result) { + is(result, "finished", "Got 'finished' result"); + is(getPermission(testPageURL, "indexedDB-unlimited"), + Components.interfaces.nsIPermissionManager.ALLOW_ACTION, + "Correct permission set"); + gBrowser.removeCurrentTab(); + unregisterAllPopupEventHandlers(); + addMoreTest1Count = seenPopupCount; + executeSoon(finish); + }); + executeSoon(function() { dispatchEvent("indexedDB-done"); }); + } + else { + ++addMoreTest1Count; + executeSoon(function() { dispatchEvent("indexedDB-addMore"); }); + } + }); + ++addMoreTest1Count; + executeSoon(function() { dispatchEvent("indexedDB-addMore"); }); + }); + + registerPopupEventHandler("popupshowing", function () { + ok(true, "prompt showing"); + seenPopupCount = addMoreTest1Count - 1; + }); + registerPopupEventHandler("popupshown", function () { + ok(true, "prompt shown"); + triggerMainCommand(this); + }); + registerPopupEventHandler("popuphidden", function () { + ok(true, "prompt hidden"); + }); + + }, true); + + info("loading test page: " + testPageURL); + content.location = testPageURL; +} diff --git a/dom/indexedDB/test/browser_quotaPromptDeny.js b/dom/indexedDB/test/browser_quotaPromptDeny.js index e8344428ef81..ac0aadf51928 100644 --- a/dom/indexedDB/test/browser_quotaPromptDeny.js +++ b/dom/indexedDB/test/browser_quotaPromptDeny.js @@ -4,7 +4,7 @@ */ // Make sure this is a unique origin or the tests will randomly fail! 
-const testPageURL = "http://test2.example.org/browser/" + +const testPageURL = "http://bug704464-2.example.com/browser/" + "dom/indexedDB/test/browser_quotaPrompt.html"; const notificationID = "indexedDB-quota-prompt"; diff --git a/dom/interfaces/base/nsIDOMWindowUtils.idl b/dom/interfaces/base/nsIDOMWindowUtils.idl index 09dd00d6dc42..8d91b22b59ad 100644 --- a/dom/interfaces/base/nsIDOMWindowUtils.idl +++ b/dom/interfaces/base/nsIDOMWindowUtils.idl @@ -68,7 +68,7 @@ interface nsIDOMWindow; interface nsIDOMFile; interface nsIFile; -[scriptable, uuid(c5cf91b3-0b89-4417-b13c-5540ba6ebde8)] +[scriptable, uuid(bf868921-0288-4799-a806-2fa642590197)] interface nsIDOMWindowUtils : nsISupports { /** @@ -557,7 +557,7 @@ interface nsIDOMWindowUtils : nsISupports { * Returns the real classname (possibly of the mostly-transparent security * wrapper) of aObj. */ - string getClassName(/*in JSObjectPtr aObj*/); + [implicit_jscontext] string getClassName(in jsval aObject); /** * Generate a content command event. diff --git a/dom/plugins/base/nsJSNPRuntime.cpp b/dom/plugins/base/nsJSNPRuntime.cpp index 9c8d62a8ac9b..01aaac196f57 100644 --- a/dom/plugins/base/nsJSNPRuntime.cpp +++ b/dom/plugins/base/nsJSNPRuntime.cpp @@ -40,6 +40,7 @@ // FIXME(bug 332648): Give me a real API please! #include "jscntxt.h" +#include "jsfriendapi.h" #include "nsIInterfaceRequestorUtils.h" #include "nsJSNPRuntime.h" @@ -1483,7 +1484,7 @@ CallNPMethodInternal(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, // the function object. if (npobj->_class->invoke) { - JSFunction *fun = (JSFunction *)::JS_GetPrivate(cx, funobj); + JSFunction *fun = ::JS_GetObjectFunction(funobj); JSString *name = ::JS_InternJSString(cx, ::JS_GetFunctionId(fun)); NPIdentifier id = StringToNPIdentifier(cx, name); diff --git a/dom/workers/Worker.cpp b/dom/workers/Worker.cpp index d65e128a3146..a4c571a47cf5 100644 --- a/dom/workers/Worker.cpp +++ b/dom/workers/Worker.cpp @@ -91,8 +91,8 @@ public: InitClass(JSContext* aCx, JSObject* aObj, JSObject* aParentProto, bool aMainRuntime) { - JSObject* proto = JS_InitClass(aCx, aObj, aParentProto, &sClass, Construct, - 0, sProperties, sFunctions, NULL, NULL); + JSObject* proto = js::InitClassWithReserved(aCx, aObj, aParentProto, &sClass, Construct, + 0, sProperties, sFunctions, NULL, NULL); if (!proto) { return NULL; } @@ -102,11 +102,10 @@ public: parent->AssertIsOnWorkerThread(); JSObject* constructor = JS_GetConstructor(aCx, proto); - if (!constructor || - !JS_SetReservedSlot(aCx, constructor, CONSTRUCTOR_SLOT_PARENT, - PRIVATE_TO_JSVAL(parent))) { + if (!constructor) return NULL; - } + js::SetFunctionNativeReserved(constructor, CONSTRUCTOR_SLOT_PARENT, + PRIVATE_TO_JSVAL(parent)); } return proto; @@ -153,11 +152,8 @@ protected: return false; } - jsval priv; - if (!JS_GetReservedSlot(aCx, JSVAL_TO_OBJECT(JS_CALLEE(aCx, aVp)), - CONSTRUCTOR_SLOT_PARENT, &priv)) { - return false; - } + jsval priv = js::GetFunctionNativeReserved(JSVAL_TO_OBJECT(JS_CALLEE(aCx, aVp)), + CONSTRUCTOR_SLOT_PARENT); RuntimeService* runtimeService; WorkerPrivate* parent; @@ -345,8 +341,8 @@ public: InitClass(JSContext* aCx, JSObject* aObj, JSObject* aParentProto, bool aMainRuntime) { - JSObject* proto = JS_InitClass(aCx, aObj, aParentProto, &sClass, Construct, - 0, NULL, NULL, NULL, NULL); + JSObject* proto = js::InitClassWithReserved(aCx, aObj, aParentProto, &sClass, Construct, + 0, NULL, NULL, NULL, NULL); if (!proto) { return NULL; } @@ -356,11 +352,10 @@ public: parent->AssertIsOnWorkerThread(); JSObject* constructor = 
JS_GetConstructor(aCx, proto); - if (!constructor || - !JS_SetReservedSlot(aCx, constructor, CONSTRUCTOR_SLOT_PARENT, - PRIVATE_TO_JSVAL(parent))) { + if (!constructor) return NULL; - } + js::SetFunctionNativeReserved(constructor, CONSTRUCTOR_SLOT_PARENT, + PRIVATE_TO_JSVAL(parent)); } return proto; diff --git a/dom/workers/WorkerScope.cpp b/dom/workers/WorkerScope.cpp index 82f540192ca3..7eda4f772419 100644 --- a/dom/workers/WorkerScope.cpp +++ b/dom/workers/WorkerScope.cpp @@ -271,11 +271,8 @@ private: JSObject* wrapper = JSVAL_TO_OBJECT(JS_CALLEE(aCx, aVp)); JS_ASSERT(JS_ObjectIsFunction(aCx, wrapper)); - jsval scope, listener; - if (!JS_GetReservedSlot(aCx, wrapper, SLOT_wrappedScope, &scope) || - !JS_GetReservedSlot(aCx, wrapper, SLOT_wrappedFunction, &listener)) { - return false; - } + jsval scope = js::GetFunctionNativeReserved(wrapper, SLOT_wrappedScope); + jsval listener = js::GetFunctionNativeReserved(wrapper, SLOT_wrappedFunction); JS_ASSERT(JSVAL_IS_OBJECT(scope)); @@ -319,11 +316,8 @@ private: JS_ASSERT(JSVAL_IS_OBJECT(adaptor)); - jsval listener; - if (!JS_GetReservedSlot(aCx, JSVAL_TO_OBJECT(adaptor), SLOT_wrappedFunction, - &listener)) { - return false; - } + jsval listener = js::GetFunctionNativeReserved(JSVAL_TO_OBJECT(adaptor), + SLOT_wrappedFunction); *aVp = listener; return true; @@ -339,8 +333,8 @@ private: return false; } - JSFunction* adaptor = JS_NewFunction(aCx, UnwrapErrorEvent, 1, 0, - JS_GetGlobalObject(aCx), "unwrap"); + JSFunction* adaptor = js::NewFunctionWithReserved(aCx, UnwrapErrorEvent, 1, 0, + JS_GetGlobalObject(aCx), "unwrap"); if (!adaptor) { return false; } @@ -350,11 +344,9 @@ private: return false; } - if (!JS_SetReservedSlot(aCx, listener, SLOT_wrappedScope, - OBJECT_TO_JSVAL(aObj)) || - !JS_SetReservedSlot(aCx, listener, SLOT_wrappedFunction, *aVp)) { - return false; - } + js::SetFunctionNativeReserved(listener, SLOT_wrappedScope, + OBJECT_TO_JSVAL(aObj)); + js::SetFunctionNativeReserved(listener, SLOT_wrappedFunction, *aVp); jsval val = OBJECT_TO_JSVAL(listener); return scope->SetEventListenerOnEventTarget(aCx, name + 2, &val); diff --git a/editor/libeditor/base/nsEditor.cpp b/editor/libeditor/base/nsEditor.cpp index b65677beea5f..5dc8e2b1b62f 100644 --- a/editor/libeditor/base/nsEditor.cpp +++ b/editor/libeditor/base/nsEditor.cpp @@ -1022,7 +1022,7 @@ nsEditor::GetDocumentIsEmpty(bool *aDocumentIsEmpty) { *aDocumentIsEmpty = true; - nsIDOMElement *rootElement = GetRoot(); + nsCOMPtr rootElement = do_QueryInterface(GetRoot()); NS_ENSURE_TRUE(rootElement, NS_ERROR_NULL_POINTER); bool hasChildNodes; @@ -1063,7 +1063,7 @@ NS_IMETHODIMP nsEditor::BeginningOfDocument() NS_ENSURE_TRUE(selection, NS_ERROR_NOT_INITIALIZED); // get the root element - nsIDOMElement *rootElement = GetRoot(); + nsCOMPtr rootElement = do_QueryInterface(GetRoot()); NS_ENSURE_TRUE(rootElement, NS_ERROR_NULL_POINTER); // find first editable thingy @@ -1109,12 +1109,9 @@ nsEditor::EndOfDocument() NS_ENSURE_TRUE(selection, NS_ERROR_NULL_POINTER); // get the root element - nsIDOMElement *rootElement = GetRoot(); - NS_ENSURE_TRUE(rootElement, NS_ERROR_NULL_POINTER); - - nsCOMPtr node = do_QueryInterface(rootElement); + nsCOMPtr node = do_QueryInterface(GetRoot()); + NS_ENSURE_TRUE(node, NS_ERROR_NULL_POINTER); nsCOMPtr child; - NS_ASSERTION(node, "Invalid root element"); do { node->GetLastChild(getter_AddRefs(child)); @@ -1978,18 +1975,14 @@ nsEditor::GetPhonetic(nsAString& aPhonetic) static nsresult -GetEditorContentWindow(nsIDOMElement *aRoot, nsIWidget **aResult) 
+GetEditorContentWindow(dom::Element *aRoot, nsIWidget **aResult) { NS_ENSURE_TRUE(aRoot && aResult, NS_ERROR_NULL_POINTER); *aResult = 0; - nsCOMPtr content = do_QueryInterface(aRoot); - - NS_ENSURE_TRUE(content, NS_ERROR_FAILURE); - // Not ref counted - nsIFrame *frame = content->GetPrimaryFrame(); + nsIFrame *frame = aRoot->GetPrimaryFrame(); NS_ENSURE_TRUE(frame, NS_ERROR_FAILURE); @@ -2103,8 +2096,8 @@ nsEditor::GetRootElement(nsIDOMElement **aRootElement) { NS_ENSURE_ARG_POINTER(aRootElement); NS_ENSURE_TRUE(mRootElement, NS_ERROR_NOT_AVAILABLE); - *aRootElement = mRootElement; - NS_ADDREF(*aRootElement); + nsCOMPtr rootElement = do_QueryInterface(mRootElement); + rootElement.forget(aRootElement); return NS_OK; } @@ -2175,12 +2168,10 @@ nsEditor::CloneAttributes(nsIDOMNode *aDestNode, nsIDOMNode *aSourceNode) // Use transaction system for undo only if destination // is already in the document - nsIDOMElement *rootElement = GetRoot(); - NS_ENSURE_TRUE(rootElement, NS_ERROR_NULL_POINTER); - - bool destInBody = true; - nsCOMPtr rootNode = do_QueryInterface(rootElement); nsCOMPtr p = aDestNode; + nsCOMPtr rootNode = do_QueryInterface(GetRoot()); + NS_ENSURE_TRUE(rootNode, NS_ERROR_NULL_POINTER); + bool destInBody = true; while (p && p != rootNode) { nsCOMPtr tmp; @@ -2292,11 +2283,12 @@ NS_IMETHODIMP nsEditor::InsertTextImpl(const nsAString& aStringToInsert, if (!mInIMEMode && aStringToInsert.IsEmpty()) return NS_OK; nsCOMPtr nodeAsText = do_QueryInterface(*aInOutNode); if (!nodeAsText && IsPlaintextEditor()) { + nsCOMPtr rootNode = do_QueryInterface(GetRoot()); // In some cases, aInOutNode is the anonymous DIV, and aInOutOffset is 0. // To avoid injecting unneeded text nodes, we first look to see if we have // one available. In that case, we'll just adjust aInOutNode and aInOutOffset // accordingly. - if (*aInOutNode == GetRoot() && *aInOutOffset == 0) { + if (*aInOutNode == rootNode && *aInOutOffset == 0) { nsCOMPtr possibleTextNode; res = (*aInOutNode)->GetFirstChild(getter_AddRefs(possibleTextNode)); if (NS_SUCCEEDED(res)) { @@ -2309,7 +2301,7 @@ NS_IMETHODIMP nsEditor::InsertTextImpl(const nsAString& aStringToInsert, // In some other cases, aInOutNode is the anonymous DIV, and aInOutOffset points // to the terminating mozBR. In that case, we'll adjust aInOutNode and aInOutOffset // to the preceding text node, if any. 
- if (!nodeAsText && *aInOutNode == GetRoot() && *aInOutOffset > 0) { + if (!nodeAsText && *aInOutNode == rootNode && *aInOutOffset > 0) { nsCOMPtr children; res = (*aInOutNode)->GetChildNodes(getter_AddRefs(children)); if (NS_SUCCEEDED(res)) { @@ -2363,7 +2355,7 @@ NS_IMETHODIMP nsEditor::InsertTextImpl(const nsAString& aStringToInsert, } else { nsCOMPtr parent; (*aInOutNode)->GetParentNode(getter_AddRefs(parent)); - if (parent == GetRoot()) { + if (parent == rootNode) { *aInOutNode = parent; } } @@ -2530,7 +2522,7 @@ NS_IMETHODIMP nsEditor::SelectEntireDocument(nsISelection *aSelection) { if (!aSelection) { return NS_ERROR_NULL_POINTER; } - nsIDOMElement *rootElement = GetRoot(); + nsCOMPtr rootElement = do_QueryInterface(GetRoot()); if (!rootElement) { return NS_ERROR_NOT_INITIALIZED; } return aSelection->SelectAllChildren(rootElement); @@ -3546,7 +3538,9 @@ nsEditor::IsRootNode(nsIDOMNode *inNode) { NS_ENSURE_TRUE(inNode, false); - return inNode == GetRoot(); + nsCOMPtr rootNode = do_QueryInterface(GetRoot()); + + return inNode == rootNode; } bool @@ -3554,11 +3548,9 @@ nsEditor::IsRootNode(nsINode *inNode) { NS_ENSURE_TRUE(inNode, false); - nsIDOMElement *rootElement = GetRoot(); + nsCOMPtr rootNode = GetRoot(); - nsCOMPtr node = do_QueryInterface(inNode); - - return node == rootElement; + return inNode == rootNode; } bool @@ -3572,7 +3564,7 @@ bool nsEditor::IsDescendantOfBody(nsINode *inNode) { NS_ENSURE_TRUE(inNode, false); - nsCOMPtr root = do_QueryInterface(GetRoot()); + nsCOMPtr root = GetRoot(); NS_ENSURE_TRUE(root, false); return nsContentUtils::ContentIsDescendantOf(inNode, root); @@ -5149,7 +5141,7 @@ nsEditor::HandleInlineSpellCheck(PRInt32 action, already_AddRefed nsEditor::FindSelectionRoot(nsINode *aNode) { - nsCOMPtr rootContent = do_QueryInterface(GetRoot()); + nsCOMPtr rootContent = GetRoot(); return rootContent.forget(); } @@ -5218,7 +5210,7 @@ nsEditor::InitializeSelection(nsIDOMEventTarget* aFocusEventTarget) return NS_OK; } -nsIDOMElement * +dom::Element * nsEditor::GetRoot() { if (!mRootElement) @@ -5236,18 +5228,14 @@ nsresult nsEditor::DetermineCurrentDirection() { // Get the current root direction from its frame - nsIDOMElement *rootElement = GetRoot(); - - nsresult rv; + dom::Element *rootElement = GetRoot(); // If we don't have an explicit direction, determine our direction // from the content's direction if (!(mFlags & (nsIPlaintextEditor::eEditorLeftToRight | nsIPlaintextEditor::eEditorRightToLeft))) { - nsCOMPtr content = do_QueryInterface(rootElement, &rv); - NS_ENSURE_SUCCESS(rv, rv); - nsIFrame* frame = content->GetPrimaryFrame(); + nsIFrame* frame = rootElement->GetPrimaryFrame(); NS_ENSURE_TRUE(frame, NS_ERROR_FAILURE); // Set the flag here, to enable us to use the same code path below. 
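
The nsEditor hunks above change GetRoot() to return the concrete mozilla::dom::Element and push the do_QueryInterface conversion out to the few call sites that still need nsIDOMElement or nsIDOMNode, so internal code can use the cheaper non-virtual entry points (GetPrimaryFrame, SetAttr with atoms). A small standalone sketch of that shape follows; Element, IDOMElement and Editor here are made-up stand-ins, not the Gecko classes.

#include <iostream>
#include <memory>
#include <string>

struct IDOMElement {                       // scriptable interface view
  virtual ~IDOMElement() = default;
  virtual void SetAttribute(const std::string& name, const std::string& value) = 0;
};

struct Element : IDOMElement {             // concrete node with faster internal APIs
  void SetAttribute(const std::string& name, const std::string& value) override {
    std::cout << "string path: " << name << "=" << value << "\n";
  }
  void SetAttr(int /*namespaceId*/, const char* atom, const char* value, bool /*notify*/) {
    std::cout << "atom path: " << atom << "=" << value << "\n";
  }
};

class Editor {
public:
  Element* GetRoot() { return mRoot.get(); }   // fast, non-refcounted, concrete
private:
  std::unique_ptr<Element> mRoot = std::make_unique<Element>();
};

int main() {
  Editor editor;
  // Internal code stays on the concrete type:
  editor.GetRoot()->SetAttr(0, "dir", "rtl", true);
  // Interface boundaries convert once (do_QueryInterface in the real tree):
  IDOMElement* domView = editor.GetRoot();
  domView->SetAttribute("dir", "ltr");
}

The payoff is that only boundary code pays for the interface hop; everything else gets direct member access on the concrete element.
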
@@ -5266,8 +5254,7 @@ NS_IMETHODIMP nsEditor::SwitchTextDirection() { // Get the current root direction from its frame - nsIDOMElement *rootElement = GetRoot(); - + dom::Element *rootElement = GetRoot(); nsresult rv = DetermineCurrentDirection(); NS_ENSURE_SUCCESS(rv, rv); @@ -5277,13 +5264,13 @@ nsEditor::SwitchTextDirection() "Unexpected mutually exclusive flag"); mFlags &= ~nsIPlaintextEditor::eEditorRightToLeft; mFlags |= nsIPlaintextEditor::eEditorLeftToRight; - rv = rootElement->SetAttribute(NS_LITERAL_STRING("dir"), NS_LITERAL_STRING("ltr")); + rv = rootElement->SetAttr(kNameSpaceID_None, nsGkAtoms::dir, NS_LITERAL_STRING("ltr"), true); } else if (mFlags & nsIPlaintextEditor::eEditorLeftToRight) { NS_ASSERTION(!(mFlags & nsIPlaintextEditor::eEditorRightToLeft), "Unexpected mutually exclusive flag"); mFlags |= nsIPlaintextEditor::eEditorRightToLeft; mFlags &= ~nsIPlaintextEditor::eEditorLeftToRight; - rv = rootElement->SetAttribute(NS_LITERAL_STRING("dir"), NS_LITERAL_STRING("rtl")); + rv = rootElement->SetAttr(kNameSpaceID_None, nsGkAtoms::dir, NS_LITERAL_STRING("rtl"), true); } return rv; @@ -5293,8 +5280,7 @@ void nsEditor::SwitchTextDirectionTo(PRUint32 aDirection) { // Get the current root direction from its frame - nsIDOMElement *rootElement = GetRoot(); - + dom::Element *rootElement = GetRoot(); nsresult rv = DetermineCurrentDirection(); NS_ENSURE_SUCCESS(rv, ); @@ -5305,14 +5291,14 @@ nsEditor::SwitchTextDirectionTo(PRUint32 aDirection) "Unexpected mutually exclusive flag"); mFlags &= ~nsIPlaintextEditor::eEditorRightToLeft; mFlags |= nsIPlaintextEditor::eEditorLeftToRight; - rootElement->SetAttribute(NS_LITERAL_STRING("dir"), NS_LITERAL_STRING("ltr")); + rootElement->SetAttr(kNameSpaceID_None, nsGkAtoms::dir, NS_LITERAL_STRING("ltr"), true); } else if (aDirection == nsIPlaintextEditor::eEditorRightToLeft && (mFlags & nsIPlaintextEditor::eEditorLeftToRight)) { NS_ASSERTION(!(mFlags & nsIPlaintextEditor::eEditorRightToLeft), "Unexpected mutually exclusive flag"); mFlags |= nsIPlaintextEditor::eEditorRightToLeft; mFlags &= ~nsIPlaintextEditor::eEditorLeftToRight; - rootElement->SetAttribute(NS_LITERAL_STRING("dir"), NS_LITERAL_STRING("rtl")); + rootElement->SetAttr(kNameSpaceID_None, nsGkAtoms::dir, NS_LITERAL_STRING("rtl"), true); } } diff --git a/editor/libeditor/base/nsEditor.h b/editor/libeditor/base/nsEditor.h index de41d293dd67..6efb113f557b 100644 --- a/editor/libeditor/base/nsEditor.h +++ b/editor/libeditor/base/nsEditor.h @@ -651,7 +651,7 @@ public: virtual already_AddRefed GetDOMEventTarget() = 0; // Fast non-refcounting editor root element accessor - nsIDOMElement *GetRoot(); + mozilla::dom::Element *GetRoot(); // Accessor methods to flags bool IsPlaintextEditor() const @@ -763,8 +763,8 @@ public: protected: - PRUint32 mModCount; // number of modifications (for undo/redo stack) - PRUint32 mFlags; // behavior flags. See nsIPlaintextEditor.idl for the flags we use. + PRUint32 mModCount; // number of modifications (for undo/redo stack) + PRUint32 mFlags; // behavior flags. See nsIPlaintextEditor.idl for the flags we use. 
nsWeakPtr mSelConWeak; // weak reference to the nsISelectionController PRInt32 mUpdateCount; @@ -785,7 +785,7 @@ protected: nsSelectionState *mSelState; // saved selection state for placeholder txn batching nsSelectionState mSavedSel; // cached selection for nsAutoSelectionReset nsRangeUpdater mRangeUpdater; // utility class object for maintaining preserved ranges - nsCOMPtr mRootElement; // cached root node + nsCOMPtr mRootElement; // cached root node PRInt32 mAction; // the current editor action EDirection mDirection; // the current direction of editor action diff --git a/editor/libeditor/html/nsHTMLCSSUtils.cpp b/editor/libeditor/html/nsHTMLCSSUtils.cpp index a105e02d3041..4cbe2330987a 100644 --- a/editor/libeditor/html/nsHTMLCSSUtils.cpp +++ b/editor/libeditor/html/nsHTMLCSSUtils.cpp @@ -1401,17 +1401,3 @@ nsHTMLCSSUtils::SetCSSPropertyPixels(nsIDOMElement * aElement, s.AppendInt(aIntValue); return SetCSSProperty(aElement, aProperty, s + NS_LITERAL_STRING("px")); } - -nsresult -nsHTMLCSSUtils::RemoveCSSProperty(nsIDOMElement * aElement, - const nsAString & aProperty) -{ - nsCOMPtr cssDecl; - PRUint32 length; - nsresult res = GetInlineStyles(aElement, getter_AddRefs(cssDecl), &length); - if (NS_FAILED(res) || !cssDecl) return res; - - nsAutoString returnString; - return cssDecl->RemoveProperty(aProperty, returnString); -} - diff --git a/editor/libeditor/html/nsHTMLCSSUtils.h b/editor/libeditor/html/nsHTMLCSSUtils.h index 12fb8cab4909..48b4a16dd8d5 100644 --- a/editor/libeditor/html/nsHTMLCSSUtils.h +++ b/editor/libeditor/html/nsHTMLCSSUtils.h @@ -139,8 +139,6 @@ public: nsresult SetCSSPropertyPixels(nsIDOMElement * aElement, const nsAString & aProperty, PRInt32 aIntValue); - nsresult RemoveCSSProperty(nsIDOMElement * aElement, - const nsAString & aProperty); /** gets the specified/computed style value of a CSS property for a given node (or its element * ancestor if it is not an element) diff --git a/editor/libeditor/html/nsHTMLEditRules.cpp b/editor/libeditor/html/nsHTMLEditRules.cpp index b8231864f4f9..9b4411006491 100644 --- a/editor/libeditor/html/nsHTMLEditRules.cpp +++ b/editor/libeditor/html/nsHTMLEditRules.cpp @@ -81,6 +81,7 @@ #include "nsIHTMLDocument.h" #include "mozilla/Preferences.h" +#include "mozilla/dom/Element.h" using namespace mozilla; @@ -256,7 +257,7 @@ nsHTMLEditRules::Init(nsPlaintextEditor *aEditor) NS_ENSURE_TRUE(mUtilRange, NS_ERROR_NULL_POINTER); // set up mDocChangeRange to be whole doc - nsIDOMElement *rootElem = mHTMLEditor->GetRoot(); + nsCOMPtr rootElem = do_QueryInterface(mHTMLEditor->GetRoot()); if (rootElem) { // temporarily turn off rules sniffing @@ -805,7 +806,7 @@ nsHTMLEditRules::GetAlignment(bool *aMixed, nsIHTMLEditor::EAlignment *aAlign) // get selection location nsCOMPtr parent; - nsIDOMElement *rootElem = mHTMLEditor->GetRoot(); + nsCOMPtr rootElem = do_QueryInterface(mHTMLEditor->GetRoot()); NS_ENSURE_TRUE(rootElem, NS_ERROR_FAILURE); PRInt32 offset, rootOffset; @@ -1012,13 +1013,10 @@ nsHTMLEditRules::GetIndentState(bool *aCanIndent, bool *aCanOutdent) // in the parent hierarchy. 
// gather up info we need for test - nsCOMPtr parent, tmp, root; - nsIDOMElement *rootElem = mHTMLEditor->GetRoot(); - NS_ENSURE_TRUE(rootElem, NS_ERROR_NULL_POINTER); + nsCOMPtr parent, tmp, root = do_QueryInterface(mHTMLEditor->GetRoot()); + NS_ENSURE_TRUE(root, NS_ERROR_NULL_POINTER); nsCOMPtr selection; PRInt32 selOffset; - root = do_QueryInterface(rootElem); - NS_ENSURE_TRUE(root, NS_ERROR_NO_INTERFACE); res = mHTMLEditor->GetSelection(getter_AddRefs(selection)); NS_ENSURE_SUCCESS(res, res); NS_ENSURE_TRUE(selection, NS_ERROR_NULL_POINTER); @@ -1108,7 +1106,7 @@ nsHTMLEditRules::GetParagraphState(bool *aMixed, nsAString &outFormat) } // remember root node - nsIDOMElement *rootElem = mHTMLEditor->GetRoot(); + nsCOMPtr rootElem = do_QueryInterface(mHTMLEditor->GetRoot()); NS_ENSURE_TRUE(rootElem, NS_ERROR_NULL_POINTER); // loop through the nodes in selection and examine their paragraph format @@ -7778,9 +7776,8 @@ nsHTMLEditRules::AdjustSelection(nsISelection *aSelection, nsIEditor::EDirection // check if br can go into the destination node if (bIsEmptyNode && mHTMLEditor->CanContainTag(selNode, NS_LITERAL_STRING("br"))) { - nsIDOMElement *rootElement = mHTMLEditor->GetRoot(); - NS_ENSURE_TRUE(rootElement, NS_ERROR_FAILURE); - nsCOMPtr rootNode(do_QueryInterface(rootElement)); + nsCOMPtr rootNode = do_QueryInterface(mHTMLEditor->GetRoot()); + NS_ENSURE_TRUE(rootNode, NS_ERROR_FAILURE); if (selNode == rootNode) { // Our root node is completely empty. Don't add a
here. @@ -8359,7 +8356,7 @@ nsHTMLEditRules::ConfirmSelectionInBody() nsresult res = NS_OK; // get the body - nsIDOMElement *rootElement = mHTMLEditor->GetRoot(); + nsCOMPtr rootElement = do_QueryInterface(mHTMLEditor->GetRoot()); NS_ENSURE_TRUE(rootElement, NS_ERROR_UNEXPECTED); // get the selection diff --git a/editor/libeditor/html/nsHTMLEditor.cpp b/editor/libeditor/html/nsHTMLEditor.cpp index 734669557bad..52fe4682ad06 100644 --- a/editor/libeditor/html/nsHTMLEditor.cpp +++ b/editor/libeditor/html/nsHTMLEditor.cpp @@ -381,28 +381,29 @@ nsHTMLEditor::GetRootElement(nsIDOMElement **aRootElement) // Use the HTML documents body element as the editor root if we didn't // get a root element during initialization. + nsCOMPtr rootElement; nsCOMPtr bodyElement; nsresult rv = GetBodyElement(getter_AddRefs(bodyElement)); NS_ENSURE_SUCCESS(rv, rv); if (bodyElement) { - mRootElement = bodyElement; + rootElement = bodyElement; } else { // If there is no HTML body element, // we should use the document root element instead. nsCOMPtr doc = do_QueryReferent(mDocWeak); NS_ENSURE_TRUE(doc, NS_ERROR_NOT_INITIALIZED); - rv = doc->GetDocumentElement(getter_AddRefs(mRootElement)); + rv = doc->GetDocumentElement(getter_AddRefs(rootElement)); NS_ENSURE_SUCCESS(rv, rv); // Document can have no elements - if (!mRootElement) { + if (!rootElement) { return NS_ERROR_NOT_AVAILABLE; } } - *aRootElement = mRootElement; - NS_ADDREF(*aRootElement); + mRootElement = do_QueryInterface(rootElement); + rootElement.forget(aRootElement); return NS_OK; } @@ -544,7 +545,7 @@ nsHTMLEditor::BeginningOfDocument() NS_ENSURE_TRUE(selection, NS_ERROR_NOT_INITIALIZED); // Get the root element. - nsIDOMElement *rootElement = GetRoot(); + nsCOMPtr rootElement = do_QueryInterface(GetRoot()); if (!rootElement) { NS_WARNING("GetRoot() returned a null pointer (mRootElement is null)"); return NS_OK; @@ -968,155 +969,6 @@ nsHTMLEditor::GetBlockNodeParent(nsIDOMNode *aNode) return p.forget(); } -/////////////////////////////////////////////////////////////////////////// -// GetBlockSection: return leftmost/rightmost nodes in aChild's block -// -nsresult -nsHTMLEditor::GetBlockSection(nsIDOMNode *aChild, - nsIDOMNode **aLeftNode, - nsIDOMNode **aRightNode) -{ - nsresult result = NS_OK; - if (!aChild || !aLeftNode || !aRightNode) {return NS_ERROR_NULL_POINTER;} - *aLeftNode = aChild; - *aRightNode = aChild; - - nsCOMPtrsibling; - result = aChild->GetPreviousSibling(getter_AddRefs(sibling)); - while ((NS_SUCCEEDED(result)) && sibling) - { - bool isBlock; - NodeIsBlockStatic(sibling, &isBlock); - if (isBlock) - { - nsCOMPtrnodeAsText = do_QueryInterface(sibling); - if (!nodeAsText) { - break; - } - // XXX: needs some logic to work for other leaf nodes besides text! 
- } - *aLeftNode = sibling; - result = (*aLeftNode)->GetPreviousSibling(getter_AddRefs(sibling)); - } - NS_ADDREF((*aLeftNode)); - // now do the right side - result = aChild->GetNextSibling(getter_AddRefs(sibling)); - while ((NS_SUCCEEDED(result)) && sibling) - { - bool isBlock; - NodeIsBlockStatic(sibling, &isBlock); - if (isBlock) - { - nsCOMPtrnodeAsText = do_QueryInterface(sibling); - if (!nodeAsText) { - break; - } - } - *aRightNode = sibling; - result = (*aRightNode)->GetNextSibling(getter_AddRefs(sibling)); - } - NS_ADDREF((*aRightNode)); - - return result; -} - - -/////////////////////////////////////////////////////////////////////////// -// GetBlockSectionsForRange: return list of block sections that intersect -// this range -nsresult -nsHTMLEditor::GetBlockSectionsForRange(nsIDOMRange *aRange, - nsCOMArray& aSections) -{ - if (!aRange) {return NS_ERROR_NULL_POINTER;} - - nsresult result; - nsCOMPtriter = - do_CreateInstance("@mozilla.org/content/post-content-iterator;1", &result); - if (NS_FAILED(result) || !iter) { - return result; - } - nsCOMPtr lastRange; - iter->Init(aRange); - while (iter->IsDone()) - { - nsCOMPtr currentContent = - do_QueryInterface(iter->GetCurrentNode()); - - nsCOMPtr currentNode = do_QueryInterface(currentContent); - if (currentNode) - { - //
divides block content ranges. We can achieve this by nulling out lastRange - if (currentContent->Tag() == nsEditProperty::br) - { - lastRange = nsnull; - } - else - { - bool isNotInlineOrText; - result = NodeIsBlockStatic(currentNode, &isNotInlineOrText); - if (isNotInlineOrText) - { - PRUint16 nodeType; - currentNode->GetNodeType(&nodeType); - if (nsIDOMNode::TEXT_NODE == nodeType) { - isNotInlineOrText = true; - } - } - if (!isNotInlineOrText) { - nsCOMPtr leftNode; - nsCOMPtr rightNode; - result = GetBlockSection(currentNode, - getter_AddRefs(leftNode), - getter_AddRefs(rightNode)); - if ((NS_SUCCEEDED(result)) && leftNode && rightNode) { - // Add range to the list if it doesn't overlap with the previous - // range. - bool addRange = true; - if (lastRange) - { - nsCOMPtr lastStartNode; - lastRange->GetStartContainer(getter_AddRefs(lastStartNode)); - nsCOMPtr blockParentNodeOfLastStartNode = - GetBlockNodeParent(lastStartNode); - nsCOMPtr blockParentOfLastStartNode = - do_QueryInterface(blockParentNodeOfLastStartNode); - if (blockParentOfLastStartNode) - { - nsCOMPtr blockParentNodeOfLeftNode = - GetBlockNodeParent(leftNode); - nsCOMPtr blockParentOfLeftNode = - do_QueryInterface(blockParentNodeOfLeftNode); - if (blockParentOfLeftNode && - blockParentOfLastStartNode == blockParentOfLeftNode) { - addRange = false; - } - } - } - if (addRange) { - nsCOMPtr range = - do_CreateInstance("@mozilla.org/content/range;1", &result); - if ((NS_SUCCEEDED(result)) && range) { - // Initialize the range. - range->SetStart(leftNode, 0); - range->SetEnd(rightNode, 0); - aSections.AppendObject(range); - lastRange = do_QueryInterface(range); - } - } - } - } - } - } - /* do not check result here, and especially do not return the result code. - * we rely on iter->IsDone to tell us when the iteration is complete - */ - iter->Next(); - } - return result; -} - - /////////////////////////////////////////////////////////////////////////// // NextNodeInBlock: gets the next/prev node in the block, if any. Next node // must be an element or text node, others are ignored @@ -1809,8 +1661,7 @@ nsHTMLEditor::RebuildDocumentFromSource(const nsAString& aSourceString) nsresult res = GetSelection(getter_AddRefs(selection)); NS_ENSURE_SUCCESS(res, res); - nsIDOMElement *bodyElement = GetRoot(); - NS_ENSURE_SUCCESS(res, res); + nsCOMPtr bodyElement = do_QueryInterface(GetRoot()); NS_ENSURE_TRUE(bodyElement, NS_ERROR_NULL_POINTER); // Find where the tag starts. 
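
nsHTMLEditor::GetRootElement() above now builds the strong reference in a local nsCOMPtr and hands it to the out parameter with forget(), instead of assigning mRootElement and calling NS_ADDREF by hand. The following is a rough standard-C++ analogue of that ownership transfer, using std::shared_ptr in place of nsCOMPtr; Editor and Element are illustrative.

#include <cassert>
#include <memory>
#include <utility>

struct Element { int value = 42; };

class Editor {
public:
  // Out-parameter getter: take a local strong reference, then move it out,
  // so the caller ends up owning exactly one reference (the forget() shape).
  bool GetRootElement(std::shared_ptr<Element>* aRootElement) {
    if (!aRootElement) return false;
    std::shared_ptr<Element> rootElement = mRootElement;   // local strong ref
    if (!rootElement) return false;
    *aRootElement = std::move(rootElement);                // "forget" into the out param
    return true;
  }
private:
  std::shared_ptr<Element> mRootElement = std::make_shared<Element>();
};

int main() {
  Editor editor;
  std::shared_ptr<Element> root;
  bool ok = editor.GetRootElement(&root);
  assert(ok && root && root->value == 42);
  (void)ok;
}
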
@@ -2452,10 +2303,10 @@ nsHTMLEditor::GetHTMLBackgroundColorState(bool *aMixed, nsAString &aOutColor) } // If no table or cell found, get page body - element = GetRoot(); - NS_ENSURE_TRUE(element, NS_ERROR_NULL_POINTER); + mozilla::dom::Element *bodyElement = GetRoot(); + NS_ENSURE_TRUE(bodyElement, NS_ERROR_NULL_POINTER); - return element->GetAttribute(styleName, aOutColor); + return bodyElement->GetAttr(kNameSpaceID_None, nsGkAtoms::bgcolor, aOutColor); } NS_IMETHODIMP @@ -3392,7 +3243,7 @@ nsHTMLEditor::SetHTMLBackgroundColor(const nsAString& aColor) // If we failed to find a cell, fall through to use originally-found element } else { // No table element -- set the background color on the body tag - element = GetRoot(); + element = do_QueryInterface(GetRoot()); NS_ENSURE_TRUE(element, NS_ERROR_NULL_POINTER); } // Use the editor method that goes through the transaction system @@ -3411,8 +3262,7 @@ NS_IMETHODIMP nsHTMLEditor::SetBodyAttribute(const nsAString& aAttribute, const NS_ASSERTION(mDocWeak, "Missing Editor DOM Document"); // Set the background color attribute on the body tag - nsIDOMElement *bodyElement = GetRoot(); - + nsCOMPtr bodyElement = do_QueryInterface(GetRoot()); NS_ENSURE_TRUE(bodyElement, NS_ERROR_NULL_POINTER); // Use the editor method that goes through the transaction system @@ -3880,7 +3730,7 @@ already_AddRefed nsHTMLEditor::FindUserSelectAllNode(nsIDOMNode* aNode) { nsCOMPtr node = aNode; - nsIDOMElement *root = GetRoot(); + nsCOMPtr root = do_QueryInterface(GetRoot()); if (!nsEditorUtils::IsDescendantOf(aNode, root)) return nsnull; @@ -4145,7 +3995,7 @@ nsHTMLEditor::SelectEntireDocument(nsISelection *aSelection) nsCOMPtr kungFuDeathGrip(mRules); // get editor root node - nsIDOMElement *rootElement = GetRoot(); + nsCOMPtr rootElement = do_QueryInterface(GetRoot()); // is doc empty? bool bDocIsEmpty; @@ -4190,7 +4040,9 @@ nsHTMLEditor::SelectAll() NS_ENSURE_TRUE(selPriv, NS_ERROR_UNEXPECTED); rv = selPriv->SetAncestorLimiter(nsnull); NS_ENSURE_SUCCESS(rv, rv); - return selection->SelectAllChildren(mRootElement); + nsCOMPtr rootElement = do_QueryInterface(mRootElement, &rv); + NS_ENSURE_SUCCESS(rv, rv); + return selection->SelectAllChildren(rootElement); } nsCOMPtr ps = GetPresShell(); @@ -4481,7 +4333,7 @@ nsHTMLEditor::CollapseAdjacentTextNodes(nsIDOMRange *aInRange) NS_IMETHODIMP nsHTMLEditor::SetSelectionAtDocumentStart(nsISelection *aSelection) { - nsIDOMElement *rootElement = GetRoot(); + nsCOMPtr rootElement = do_QueryInterface(GetRoot()); NS_ENSURE_TRUE(rootElement, NS_ERROR_NULL_POINTER); return aSelection->Collapse(rootElement,0); diff --git a/editor/libeditor/html/nsHTMLEditor.h b/editor/libeditor/html/nsHTMLEditor.h index 54ba368a19b7..2fc00cffaf5f 100644 --- a/editor/libeditor/html/nsHTMLEditor.h +++ b/editor/libeditor/html/nsHTMLEditor.h @@ -277,38 +277,6 @@ public: /* ------------ Block methods moved from nsEditor -------------- */ static already_AddRefed GetBlockNodeParent(nsIDOMNode *aNode); - /** Determines the bounding nodes for the block section containing aNode. - * The calculation is based on some nodes intrinsically being block elements - * acording to HTML. Style sheets are not considered in this calculation. - *
<br> tags separate block content sections.  So the HTML markup:
- *    <pre>
- *      <P>text1<br>text2<B>text3</B></P>
- *    </pre>
- * contains two block content sections. The first has the text node "text1" - * for both endpoints. The second has "text2" as the left endpoint and - * "text3" as the right endpoint. - * Notice that offsets aren't required, only leaf nodes. Offsets are implicit. - * - * @param aNode the block content returned includes aNode - * @param aLeftNode [OUT] the left endpoint of the block content containing aNode - * @param aRightNode [OUT] the right endpoint of the block content containing aNode - * - */ - static nsresult GetBlockSection(nsIDOMNode *aNode, - nsIDOMNode **aLeftNode, - nsIDOMNode **aRightNode); - - /** Compute the set of block sections in a given range. - * A block section is the set of (leftNode, rightNode) pairs given - * by GetBlockSection. The set is computed by computing the - * block section for every leaf node in the range and throwing - * out duplicates. - * - * @param aRange The range to compute block sections for. - * @param aSections Allocated storage for the resulting set, stored as nsIDOMRanges. - */ - static nsresult GetBlockSectionsForRange(nsIDOMRange *aRange, - nsCOMArray& aSections); static already_AddRefed NextNodeInBlock(nsIDOMNode *aNode, IterDirection aDir); nsresult IsNextCharWhitespace(nsIDOMNode *aParentNode, diff --git a/editor/libeditor/html/nsHTMLInlineTableEditor.cpp b/editor/libeditor/html/nsHTMLInlineTableEditor.cpp index ffa706038b77..0c70ca8c88f6 100644 --- a/editor/libeditor/html/nsHTMLInlineTableEditor.cpp +++ b/editor/libeditor/html/nsHTMLInlineTableEditor.cpp @@ -43,6 +43,7 @@ #include "nsIContent.h" #include "nsHTMLEditUtils.h" #include "nsReadableUtils.h" +#include "mozilla/dom/Element.h" // Uncomment the following line if you want to disable // table deletion when the only column/row is removed @@ -77,7 +78,7 @@ nsHTMLEditor::ShowInlineTableEditingUI(nsIDOMElement * aCell) } // the resizers and the shadow will be anonymous children of the body - nsIDOMElement *bodyElement = GetRoot(); + nsCOMPtr bodyElement = do_QueryInterface(GetRoot()); NS_ENSURE_TRUE(bodyElement, NS_ERROR_NULL_POINTER); CreateAnonymousElement(NS_LITERAL_STRING("a"), bodyElement, @@ -130,10 +131,7 @@ nsHTMLEditor::HideInlineTableEditingUI() // UnbindFromTree. // get the root content node. 
- - nsIDOMElement *bodyElement = GetRoot(); - - nsCOMPtr bodyContent( do_QueryInterface(bodyElement) ); + nsCOMPtr bodyContent = GetRoot(); NS_ENSURE_TRUE(bodyContent, NS_ERROR_FAILURE); DeleteRefToAnonymousNode(mAddColumnBeforeButton, bodyContent, ps); diff --git a/editor/libeditor/text/nsPlaintextEditor.cpp b/editor/libeditor/text/nsPlaintextEditor.cpp index 81f6a22606eb..72129ea2e246 100644 --- a/editor/libeditor/text/nsPlaintextEditor.cpp +++ b/editor/libeditor/text/nsPlaintextEditor.cpp @@ -75,6 +75,7 @@ #include "nsGkAtoms.h" #include "nsDebug.h" #include "mozilla/Preferences.h" +#include "mozilla/dom/Element.h" // Drag & Drop, Clipboard #include "nsIClipboard.h" @@ -571,7 +572,8 @@ nsPlaintextEditor::GetTextSelectionOffsets(nsISelection *aSelection, aSelection->GetFocusNode(getter_AddRefs(endNode)); aSelection->GetFocusOffset(&endNodeOffset); - nsIDOMElement* rootNode = GetRoot(); + dom::Element *rootElement = GetRoot(); + nsCOMPtr rootNode = do_QueryInterface(rootElement); NS_ENSURE_TRUE(rootNode, NS_ERROR_NULL_POINTER); PRInt32 startOffset = -1; @@ -585,8 +587,7 @@ nsPlaintextEditor::GetTextSelectionOffsets(nsISelection *aSelection, PRInt32 nodeCount = 0; // only needed for the assertions below #endif PRUint32 totalLength = 0; - nsCOMPtr rootContent = do_QueryInterface(rootNode); - iter->Init(rootContent); + iter->Init(rootElement); for (; !iter->IsDone() && (startOffset == -1 || endOffset == -1); iter->Next()) { nsCOMPtr currentNode = do_QueryInterface(iter->GetCurrentNode()); nsCOMPtr textNode = do_QueryInterface(currentNode); @@ -1020,16 +1021,15 @@ nsPlaintextEditor::GetTextLength(PRInt32 *aCount) if (docEmpty) return NS_OK; - nsIDOMElement* rootNode = GetRoot(); - NS_ENSURE_TRUE(rootNode, NS_ERROR_NULL_POINTER); + dom::Element *rootElement = GetRoot(); + NS_ENSURE_TRUE(rootElement, NS_ERROR_NULL_POINTER); nsCOMPtr iter = do_CreateInstance("@mozilla.org/content/post-content-iterator;1", &rv); NS_ENSURE_SUCCESS(rv, rv); PRUint32 totalLength = 0; - nsCOMPtr rootContent = do_QueryInterface(rootNode); - iter->Init(rootContent); + iter->Init(rootElement); for (; !iter->IsDone(); iter->Next()) { nsCOMPtr currentNode = do_QueryInterface(iter->GetCurrentNode()); nsCOMPtr textNode = do_QueryInterface(currentNode); @@ -1104,13 +1104,12 @@ nsPlaintextEditor::SetWrapWidth(PRInt32 aWrapColumn) // Ought to set a style sheet here ... // Probably should keep around an mPlaintextStyleSheet for this purpose. - nsIDOMElement *rootElement = GetRoot(); + dom::Element *rootElement = GetRoot(); NS_ENSURE_TRUE(rootElement, NS_ERROR_NULL_POINTER); // Get the current style for this root element: - NS_NAMED_LITERAL_STRING(styleName, "style"); nsAutoString styleValue; - nsresult res = rootElement->GetAttribute(styleName, styleValue); + nsresult res = rootElement->GetAttr(kNameSpaceID_None, nsGkAtoms::style, styleValue); NS_ENSURE_SUCCESS(res, res); // We'll replace styles for these values: @@ -1154,7 +1153,7 @@ nsPlaintextEditor::SetWrapWidth(PRInt32 aWrapColumn) else styleValue.AppendLiteral("white-space: pre;"); - return rootElement->SetAttribute(styleName, styleValue); + return rootElement->SetAttr(kNameSpaceID_None, nsGkAtoms::style, styleValue, true); } NS_IMETHODIMP @@ -1354,7 +1353,7 @@ nsPlaintextEditor::GetAndInitDocEncoder(const nsAString& aFormatType, // in which case we set the selection to encompass the root. 
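
nsPlaintextEditor::GetTextLength() above now seeds the content iterator directly with the root element (iter->Init(rootElement)) and adds up the lengths of the text nodes it visits. The sketch below mirrors that traversal over a toy tree; Node and TextLength are made up for illustration and are not the Gecko iterator API.

#include <iostream>
#include <memory>
#include <string>
#include <vector>

struct Node {
  std::string text;                         // non-empty => text node
  std::vector<std::unique_ptr<Node>> children;
};

static size_t TextLength(const Node& node) {
  size_t total = node.text.size();          // counts 0 for container nodes
  for (const auto& child : node.children)
    total += TextLength(*child);            // pre-order walk, like the iterator
  return total;
}

int main() {
  Node root;
  root.children.push_back(std::make_unique<Node>(Node{"Hello, ", {}}));
  auto span = std::make_unique<Node>();
  span->children.push_back(std::make_unique<Node>(Node{"world", {}}));
  root.children.push_back(std::move(span));
  std::cout << TextLength(root) << "\n";    // prints 12
}
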
else { - nsIDOMElement *rootElement = GetRoot(); + nsCOMPtr rootElement = do_QueryInterface(GetRoot()); NS_ENSURE_TRUE(rootElement, NS_ERROR_FAILURE); if (!nsTextEditUtils::IsBody(rootElement)) { @@ -1684,7 +1683,7 @@ nsPlaintextEditor::SelectEntireDocument(nsISelection *aSelection) if (NS_SUCCEEDED(mRules->DocumentIsEmpty(&bDocIsEmpty)) && bDocIsEmpty) { // get root node - nsIDOMElement *rootElement = GetRoot(); + nsCOMPtr rootElement = do_QueryInterface(GetRoot()); NS_ENSURE_TRUE(rootElement, NS_ERROR_FAILURE); // if it's empty don't select entire doc - that would select the bogus node diff --git a/editor/libeditor/text/nsTextEditRules.cpp b/editor/libeditor/text/nsTextEditRules.cpp index 7f9e9cab2977..2d2a324f24cd 100644 --- a/editor/libeditor/text/nsTextEditRules.cpp +++ b/editor/libeditor/text/nsTextEditRules.cpp @@ -68,6 +68,7 @@ #include "mozilla/Preferences.h" #include "mozilla/LookAndFeel.h" +#include "mozilla/dom/Element.h" using namespace mozilla; @@ -460,8 +461,7 @@ nsTextEditRules::CollapseSelectionToTrailingBRIfNeeded(nsISelection* aSelection) &parentOffset); NS_ENSURE_SUCCESS(res, res); - nsIDOMElement *rootElem = mEditor->GetRoot(); - nsCOMPtr root = do_QueryInterface(rootElem); + nsCOMPtr root = do_QueryInterface(mEditor->GetRoot()); NS_ENSURE_TRUE(root, NS_ERROR_NULL_POINTER); if (parentNode != root) return NS_OK; @@ -949,7 +949,7 @@ nsTextEditRules::DidUndo(nsISelection *aSelection, nsresult aResult) } else { - nsIDOMElement *theRoot = mEditor->GetRoot(); + nsCOMPtr theRoot = do_QueryInterface(mEditor->GetRoot()); NS_ENSURE_TRUE(theRoot, NS_ERROR_FAILURE); nsCOMPtr node = mEditor->GetLeftmostChild(theRoot); if (node && mEditor->IsMozEditorBogusNode(node)) @@ -982,7 +982,7 @@ nsTextEditRules::DidRedo(nsISelection *aSelection, nsresult aResult) } else { - nsIDOMElement *theRoot = mEditor->GetRoot(); + nsCOMPtr theRoot = do_QueryInterface(mEditor->GetRoot()); NS_ENSURE_TRUE(theRoot, NS_ERROR_FAILURE); nsCOMPtr nodeList; @@ -1056,7 +1056,7 @@ nsTextEditRules::RemoveRedundantTrailingBR() if (IsSingleLineEditor()) return NS_OK; - nsIDOMNode* body = mEditor->GetRoot(); + nsCOMPtr body = do_QueryInterface(mEditor->GetRoot()); if (!body) return NS_ERROR_NULL_POINTER; @@ -1111,7 +1111,7 @@ nsTextEditRules::CreateTrailingBRIfNeeded() // but only if we aren't a single line edit field if (IsSingleLineEditor()) return NS_OK; - nsIDOMNode *body = mEditor->GetRoot(); + nsCOMPtr body = do_QueryInterface(mEditor->GetRoot()); NS_ENSURE_TRUE(body, NS_ERROR_NULL_POINTER); nsCOMPtr lastChild; nsresult res = body->GetLastChild(getter_AddRefs(lastChild)); @@ -1141,7 +1141,7 @@ nsTextEditRules::CreateBogusNodeIfNeeded(nsISelection *aSelection) // tell rules system to not do any post-processing nsAutoRules beginRulesSniffing(mEditor, nsEditor::kOpIgnore, nsIEditor::eNone); - nsIDOMNode* body = mEditor->GetRoot(); + nsCOMPtr body = do_QueryInterface(mEditor->GetRoot()); if (!body) { // we don't even have a body yet, don't insert any bogus nodes at diff --git a/gfx/gl/GLContext.h b/gfx/gl/GLContext.h index 2c014c74af39..d7cefb0824e5 100644 --- a/gfx/gl/GLContext.h +++ b/gfx/gl/GLContext.h @@ -78,7 +78,7 @@ namespace mozilla { namespace layers { class LayerManagerOGL; class ColorTextureLayerProgram; - }; + } namespace gl { class GLContext; @@ -542,6 +542,7 @@ public: #endif mIsGlobalSharedContext(false), mHasRobustness(false), + mContextLost(false), mVendor(-1), mDebugMode(0), mCreationFormat(aFormat), @@ -595,6 +596,8 @@ public: return MakeCurrentImpl(aForce); } + bool IsContextLost() { return 
mContextLost; } + virtual bool SetupLookupFunction() = 0; virtual void WindowDestroyed() {} @@ -1326,6 +1329,7 @@ protected: bool mIsGLES2; bool mIsGlobalSharedContext; bool mHasRobustness; + bool mContextLost; PRInt32 mVendor; diff --git a/gfx/gl/GLContextProviderEGL.cpp b/gfx/gl/GLContextProviderEGL.cpp index dcc6792c5658..4974f35e5718 100644 --- a/gfx/gl/GLContextProviderEGL.cpp +++ b/gfx/gl/GLContextProviderEGL.cpp @@ -834,6 +834,10 @@ public: succeeded = sEGLLibrary.fMakeCurrent(EGL_DISPLAY(), mSurface, mSurface, mContext); + if (!succeeded && sEGLLibrary.fGetError() == LOCAL_EGL_CONTEXT_LOST) { + mContextLost = true; + NS_WARNING("EGL context has been lost."); + } NS_ASSERTION(succeeded, "Failed to make GL context current!"); } diff --git a/gfx/gl/GLContextProviderWGL.cpp b/gfx/gl/GLContextProviderWGL.cpp index 7491575e08ff..48f53464877a 100644 --- a/gfx/gl/GLContextProviderWGL.cpp +++ b/gfx/gl/GLContextProviderWGL.cpp @@ -122,6 +122,13 @@ CreateDummyWindow(HDC *aWindowDC = nsnull) return win; } +static inline bool +HasExtension(const char* aExtensions, const char* aRequiredExtension) +{ + return GLContext::ListHasExtension( + reinterpret_cast(aExtensions), aRequiredExtension); +} + bool WGLLibrary::EnsureInitialized() { @@ -205,9 +212,47 @@ WGLLibrary::EnsureInitialized() fChoosePixelFormat = nsnull; } + LibrarySymbolLoader::SymLoadStruct extensionsSymbols[] = { + { (PRFuncPtr *) &fGetExtensionsString, { "wglGetExtensionsStringARB", NULL} }, + { NULL, { NULL } } + }; + + LibrarySymbolLoader::SymLoadStruct robustnessSymbols[] = { + { (PRFuncPtr *) &fCreateContextAttribs, { "wglCreateContextAttribsARB", NULL} }, + { NULL, { NULL } } + }; + + if (LibrarySymbolLoader::LoadSymbols(mOGLLibrary, &extensionsSymbols[0], + (LibrarySymbolLoader::PlatformLookupFunction)fGetProcAddress)) { + const char *wglExts = fGetExtensionsString(gSharedWindowDC); + if (wglExts && HasExtension(wglExts, "WGL_ARB_create_context")) { + LibrarySymbolLoader::LoadSymbols(mOGLLibrary, &robustnessSymbols[0], + (LibrarySymbolLoader::PlatformLookupFunction)fGetProcAddress); + if (HasExtension(wglExts, "WGL_ARB_create_context_robustness")) { + mHasRobustness = true; + } + } + } + // reset back to the previous context, just in case fMakeCurrent(curDC, curCtx); + if (mHasRobustness) { + fDeleteContext(gSharedWindowGLContext); + + int attribs[] = { + LOCAL_WGL_CONTEXT_FLAGS_ARB, LOCAL_WGL_CONTEXT_ROBUST_ACCESS_BIT_ARB, + LOCAL_WGL_CONTEXT_RESET_NOTIFICATION_STRATEGY_ARB, LOCAL_WGL_LOSE_CONTEXT_ON_RESET_ARB, + NULL + }; + + gSharedWindowGLContext = fCreateContextAttribs(gSharedWindowDC, NULL, attribs); + if (!gSharedWindowGLContext) { + mHasRobustness = false; + gSharedWindowGLContext = fCreateContext(gSharedWindowDC); + } + } + mInitialized = true; // Call this to create the global GLContext instance, @@ -309,7 +354,7 @@ public: bool SupportsRobustness() { - return false; + return sWGLLibrary.HasRobustness(); } virtual bool SwapBuffers() { @@ -502,16 +547,35 @@ GLContextProviderWGL::CreateForWindow(nsIWidget *aWidget) HDC dc = (HDC)aWidget->GetNativeData(NS_NATIVE_GRAPHIC); SetPixelFormat(dc, gSharedWindowPixelFormat, NULL); - HGLRC context = sWGLLibrary.fCreateContext(dc); - if (!context) { - return nsnull; - } + HGLRC context; GLContextWGL *shareContext = GetGlobalContextWGL(); - if (shareContext && - !sWGLLibrary.fShareLists(shareContext->Context(), context)) - { - shareContext = nsnull; + + if (sWGLLibrary.HasRobustness()) { + int attribs[] = { + LOCAL_WGL_CONTEXT_FLAGS_ARB, LOCAL_WGL_CONTEXT_ROBUST_ACCESS_BIT_ARB, + 
LOCAL_WGL_CONTEXT_RESET_NOTIFICATION_STRATEGY_ARB, LOCAL_WGL_LOSE_CONTEXT_ON_RESET_ARB, + NULL + }; + + context = sWGLLibrary.fCreateContextAttribs(dc, + shareContext ? shareContext->Context() : nsnull, + attribs); + if (!context && shareContext) { + context = sWGLLibrary.fCreateContextAttribs(dc, nsnull, attribs); + if (context) { + shareContext = nsnull; + } + } else { + context = sWGLLibrary.fCreateContext(dc); + if (context && shareContext && !sWGLLibrary.fShareLists(shareContext->Context(), context)) { + shareContext = nsnull; + } + } + } + + if (!context) { + return nsnull; } nsRefPtr glContext = new GLContextWGL(ContextFormat(ContextFormat::BasicRGB24), @@ -597,7 +661,19 @@ CreatePBufferOffscreenContext(const gfxIntSize& aSize, HDC pbdc = sWGLLibrary.fGetPbufferDC(pbuffer); NS_ASSERTION(pbdc, "expected a dc"); - HGLRC context = sWGLLibrary.fCreateContext(pbdc); + HGLRC context; + if (sWGLLibrary.HasRobustness()) { + int attribs[] = { + LOCAL_WGL_CONTEXT_FLAGS_ARB, LOCAL_WGL_CONTEXT_ROBUST_ACCESS_BIT_ARB, + LOCAL_WGL_CONTEXT_RESET_NOTIFICATION_STRATEGY_ARB, LOCAL_WGL_LOSE_CONTEXT_ON_RESET_ARB, + NULL + }; + + context = sWGLLibrary.fCreateContextAttribs(pbdc, nsnull, attribs); + } else { + context = sWGLLibrary.fCreateContext(pbdc); + } + if (!context) { sWGLLibrary.fDestroyPbuffer(pbuffer); return false; @@ -629,15 +705,28 @@ CreateWindowOffscreenContext(const ContextFormat& aFormat) } HGLRC context = sWGLLibrary.fCreateContext(dc); - if (!context) { - return nsnull; + if (sWGLLibrary.HasRobustness()) { + int attribs[] = { + LOCAL_WGL_CONTEXT_FLAGS_ARB, LOCAL_WGL_CONTEXT_ROBUST_ACCESS_BIT_ARB, + LOCAL_WGL_CONTEXT_RESET_NOTIFICATION_STRATEGY_ARB, LOCAL_WGL_LOSE_CONTEXT_ON_RESET_ARB, + NULL + }; + + context = sWGLLibrary.fCreateContextAttribs(dc, shareContext->Context(), attribs); + } else { + context = sWGLLibrary.fCreateContext(dc); + if (context && shareContext && + !sWGLLibrary.fShareLists(shareContext->Context(), context)) + { + NS_WARNING("wglShareLists failed!"); + + sWGLLibrary.fDeleteContext(context); + DestroyWindow(win); + return nsnull; + } } - if (!sWGLLibrary.fShareLists(shareContext->Context(), context)) { - NS_WARNING("wglShareLists failed!"); - - sWGLLibrary.fDeleteContext(context); - DestroyWindow(win); + if (!context) { return nsnull; } diff --git a/gfx/gl/GLDefs.h b/gfx/gl/GLDefs.h index 4efb78992e08..e93167a29f36 100644 --- a/gfx/gl/GLDefs.h +++ b/gfx/gl/GLDefs.h @@ -3246,4 +3246,5 @@ typedef ptrdiff_t GLintptr; #define LOCAL_EGL_CORE_NATIVE_ENGINE 0x305B #define LOCAL_EGL_READ 0x305A #define LOCAL_EGL_DRAW 0x3059 +#define LOCAL_EGL_CONTEXT_LOST 0x300E #endif diff --git a/gfx/gl/Makefile.in b/gfx/gl/Makefile.in index a26a7038c2eb..f7c787029d45 100644 --- a/gfx/gl/Makefile.in +++ b/gfx/gl/Makefile.in @@ -1,3 +1,37 @@ +# ***** BEGIN LICENSE BLOCK ***** +# Version: MPL 1.1/GPL 2.0/LGPL 2.1 +# +# The contents of this file are subject to the Mozilla Public License Version +# 1.1 (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.mozilla.org/MPL/ +# +# Software distributed under the License is distributed on an "AS IS" basis, +# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License +# for the specific language governing rights and limitations under the +# License. +# +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is Mozilla Foundation. 
+# Portions created by the Initial Developer are Copyright (C) 2011 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# +# Alternatively, the contents of this file may be used under the terms of +# either of the GNU General Public License Version 2 or later (the "GPL"), +# or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), +# in which case the provisions of the GPL or the LGPL are applicable instead +# of those above. If you wish to allow use of your version of this file only +# under the terms of either the GPL or the LGPL, and not to allow others to +# use your version of this file under the terms of the MPL, indicate your +# decision by deleting the provisions above and replace them with the notice +# and other provisions required by the GPL or the LGPL. If you do not delete +# the provisions above, a recipient may use your version of this file under +# the terms of any one of the MPL, the GPL or the LGPL. +# +# ***** END LICENSE BLOCK ***** DEPTH = ../.. topsrcdir = @top_srcdir@ diff --git a/gfx/gl/WGLLibrary.h b/gfx/gl/WGLLibrary.h index b2cb9042c5ca..d062d3f0fb32 100644 --- a/gfx/gl/WGLLibrary.h +++ b/gfx/gl/WGLLibrary.h @@ -42,7 +42,8 @@ namespace gl { class WGLLibrary { public: - WGLLibrary() : mInitialized(false), mOGLLibrary(nsnull) {} + WGLLibrary() : mInitialized(false), mOGLLibrary(nsnull), + mHasRobustness(false) {} typedef HGLRC (GLAPIENTRY * PFNWGLCREATECONTEXTPROC) (HDC); PFNWGLCREATECONTEXTPROC fCreateContext; @@ -76,11 +77,20 @@ public: typedef BOOL (WINAPI * PFNWGLGETPIXELFORMATATTRIBIVPROC) (HDC hdc, int iPixelFormat, int iLayerPlane, UINT nAttributes, int* piAttributes, int *piValues); PFNWGLGETPIXELFORMATATTRIBIVPROC fGetPixelFormatAttribiv; + typedef const char * (WINAPI * PFNWGLGETEXTENSIONSSTRINGPROC) (HDC hdc); + PFNWGLGETEXTENSIONSSTRINGPROC fGetExtensionsString; + + typedef HGLRC (WINAPI * PFNWGLCREATECONTEXTATTRIBSPROC) (HDC hdc, HGLRC hShareContext, const int *attribList); + PFNWGLCREATECONTEXTATTRIBSPROC fCreateContextAttribs; + bool EnsureInitialized(); + bool HasRobustness() const { return mHasRobustness; } + private: bool mInitialized; PRLibrary *mOGLLibrary; + bool mHasRobustness; }; // a global WGLLibrary instance diff --git a/gfx/qcms/Makefile.in b/gfx/qcms/Makefile.in index ddac8d5cded0..12cbabf133b9 100644 --- a/gfx/qcms/Makefile.in +++ b/gfx/qcms/Makefile.in @@ -1,3 +1,38 @@ +# ***** BEGIN LICENSE BLOCK ***** +# Version: MPL 1.1/GPL 2.0/LGPL 2.1 +# +# The contents of this file are subject to the Mozilla Public License Version +# 1.1 (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.mozilla.org/MPL/ +# +# Software distributed under the License is distributed on an "AS IS" basis, +# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License +# for the specific language governing rights and limitations under the +# License. +# +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is Mozilla Foundation. +# Portions created by the Initial Developer are Copyright (C) 2011 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# +# Alternatively, the contents of this file may be used under the terms of +# either of the GNU General Public License Version 2 or later (the "GPL"), +# or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), +# in which case the provisions of the GPL or the LGPL are applicable instead +# of those above. 
If you wish to allow use of your version of this file only +# under the terms of either the GPL or the LGPL, and not to allow others to +# use your version of this file under the terms of the MPL, indicate your +# decision by deleting the provisions above and replace them with the notice +# and other provisions required by the GPL or the LGPL. If you do not delete +# the provisions above, a recipient may use your version of this file under +# the terms of any one of the MPL, the GPL or the LGPL. +# +# ***** END LICENSE BLOCK ***** + DEPTH = ../.. topsrcdir = @top_srcdir@ srcdir = @srcdir@ diff --git a/gfx/src/nsScriptableRegion.cpp b/gfx/src/nsScriptableRegion.cpp index 571b2e536a3b..ce41f5f03c12 100644 --- a/gfx/src/nsScriptableRegion.cpp +++ b/gfx/src/nsScriptableRegion.cpp @@ -162,6 +162,10 @@ NS_IMETHODIMP nsScriptableRegion::GetRects(JSContext* aCx, JS::Value* aRects) } JSObject* destArray = JS_NewArrayObject(aCx, numRects * 4, NULL); + if (!destArray) { + return NS_ERROR_OUT_OF_MEMORY; + } + *aRects = OBJECT_TO_JSVAL(destArray); uint32 n = 0; diff --git a/gfx/thebes/Makefile.in b/gfx/thebes/Makefile.in index ec6005a0ebc4..85655c1ced4b 100644 --- a/gfx/thebes/Makefile.in +++ b/gfx/thebes/Makefile.in @@ -1,3 +1,37 @@ +# ***** BEGIN LICENSE BLOCK ***** +# Version: MPL 1.1/GPL 2.0/LGPL 2.1 +# +# The contents of this file are subject to the Mozilla Public License Version +# 1.1 (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.mozilla.org/MPL/ +# +# Software distributed under the License is distributed on an "AS IS" basis, +# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License +# for the specific language governing rights and limitations under the +# License. +# +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is Mozilla Foundation. +# Portions created by the Initial Developer are Copyright (C) 2011 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# +# Alternatively, the contents of this file may be used under the terms of +# either of the GNU General Public License Version 2 or later (the "GPL"), +# or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), +# in which case the provisions of the GPL or the LGPL are applicable instead +# of those above. If you wish to allow use of your version of this file only +# under the terms of either the GPL or the LGPL, and not to allow others to +# use your version of this file under the terms of the MPL, indicate your +# decision by deleting the provisions above and replace them with the notice +# and other provisions required by the GPL or the LGPL. If you do not delete +# the provisions above, a recipient may use your version of this file under +# the terms of any one of the MPL, the GPL or the LGPL. +# +# ***** END LICENSE BLOCK ***** DEPTH = ../.. 
topsrcdir = @top_srcdir@ diff --git a/hal/fallback/FallbackHal.cpp b/hal/fallback/FallbackHal.cpp index c27b2e57fba0..8920ecadcab2 100644 --- a/hal/fallback/FallbackHal.cpp +++ b/hal/fallback/FallbackHal.cpp @@ -66,7 +66,7 @@ GetCurrentBatteryInformation(hal::BatteryInformation* aBatteryInfo) { aBatteryInfo->level() = dom::battery::kDefaultLevel; aBatteryInfo->charging() = dom::battery::kDefaultCharging; - aBatteryInfo->remainingTime() = dom::battery::kUnknownRemainingTime; + aBatteryInfo->remainingTime() = dom::battery::kDefaultRemainingTime; } } // hal_impl diff --git a/hal/linux/LinuxHal.cpp b/hal/linux/LinuxHal.cpp index 6aede0842a6c..f6b7c1b09622 100644 --- a/hal/linux/LinuxHal.cpp +++ b/hal/linux/LinuxHal.cpp @@ -66,7 +66,7 @@ GetCurrentBatteryInformation(hal::BatteryInformation* aBatteryInfo) { aBatteryInfo->level() = dom::battery::kDefaultLevel; aBatteryInfo->charging() = dom::battery::kDefaultCharging; - aBatteryInfo->remainingTime() = dom::battery::kUnknownRemainingTime; + aBatteryInfo->remainingTime() = dom::battery::kDefaultRemainingTime; } #endif // !MOZ_ENABLE_DBUS diff --git a/js/jsd/jsd_val.c b/js/jsd/jsd_val.c index c0f9ca3c9fa5..571a01f393d4 100644 --- a/js/jsd/jsd_val.c +++ b/js/jsd/jsd_val.c @@ -747,7 +747,7 @@ jsd_GetValueParent(JSDContext* jsdc, JSDValue* jsdval) return NULL; } - parent = JS_GetParent(jsdc->dumbContext,obj); + parent = JS_GetParentOrScopeChain(jsdc->dumbContext,obj); JS_LeaveCrossCompartmentCall(call); JS_EndRequest(jsdc->dumbContext); if(!parent) diff --git a/js/public/HashTable.h b/js/public/HashTable.h index 0c51dc22200f..36b96793ee8f 100644 --- a/js/public/HashTable.h +++ b/js/public/HashTable.h @@ -357,6 +357,7 @@ class HashTable : private AllocPolicy public: HashTable(AllocPolicy ap) : AllocPolicy(ap), + hashShift(sHashBits), entryCount(0), gen(0), removedCount(0), diff --git a/js/src/assembler/assembler/AbstractMacroAssembler.h b/js/src/assembler/assembler/AbstractMacroAssembler.h index 3c14c80bd4b0..604ea7924366 100644 --- a/js/src/assembler/assembler/AbstractMacroAssembler.h +++ b/js/src/assembler/assembler/AbstractMacroAssembler.h @@ -564,6 +564,11 @@ public: return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_label); } + ptrdiff_t differenceBetween(DataLabelPtr from, Label to) + { + return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_label); + } + ptrdiff_t differenceBetween(DataLabelPtr from, Jump to) { return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_jmp); diff --git a/js/src/assembler/assembler/MacroAssembler.h b/js/src/assembler/assembler/MacroAssembler.h index d53f93d9c05a..8bf9c0b0a80e 100644 --- a/js/src/assembler/assembler/MacroAssembler.h +++ b/js/src/assembler/assembler/MacroAssembler.h @@ -334,7 +334,7 @@ public: return branch32(cond, left, Imm32(right)); } - Jump branchPtr(Condition cond, AbsoluteAddress left, ImmPtr right) + Jump branchPtr(Condition cond, AbsoluteAddress left, ImmPtr right, RegisterID scratch) { return branch32(cond, left, Imm32(right)); } diff --git a/js/src/assembler/assembler/MacroAssemblerX86_64.h b/js/src/assembler/assembler/MacroAssemblerX86_64.h index 7ff99537c0a1..628c7fdb8a6a 100644 --- a/js/src/assembler/assembler/MacroAssemblerX86_64.h +++ b/js/src/assembler/assembler/MacroAssemblerX86_64.h @@ -431,6 +431,12 @@ public: return branchPtr(cond, Address(scratchRegister), right); } + Jump branchPtr(Condition cond, AbsoluteAddress left, ImmPtr right, RegisterID scratch) + { + move(ImmPtr(left.m_ptr), scratch); + return branchPtr(cond, Address(scratch), 
right); + } + Jump branchPtr(Condition cond, Address left, RegisterID right) { m_assembler.cmpq_rm(right, left.offset, left.base); diff --git a/js/src/assembler/assembler/RepatchBuffer.h b/js/src/assembler/assembler/RepatchBuffer.h index 8d28da73586a..003479211b43 100644 --- a/js/src/assembler/assembler/RepatchBuffer.h +++ b/js/src/assembler/assembler/RepatchBuffer.h @@ -110,9 +110,9 @@ public: MacroAssembler::repatchInt32(dataLabel32, value); } - void repatch(CodeLocationDataLabelPtr dataLabelPtr, void* value) + void repatch(CodeLocationDataLabelPtr dataLabelPtr, const void* value) { - MacroAssembler::repatchPointer(dataLabelPtr, value); + MacroAssembler::repatchPointer(dataLabelPtr, (void*) value); } void repatchLoadPtrToLEA(CodeLocationInstruction instruction) diff --git a/js/src/ctypes/CTypes.cpp b/js/src/ctypes/CTypes.cpp index 8dd0e5e2d766..71728cdfb678 100644 --- a/js/src/ctypes/CTypes.cpp +++ b/js/src/ctypes/CTypes.cpp @@ -641,7 +641,7 @@ InitTypeConstructor(JSContext* cx, JSObject*& typeProto, JSObject*& dataProto) { - JSFunction* fun = JS_DefineFunction(cx, parent, spec.name, spec.call, + JSFunction* fun = js::DefineFunctionWithReserved(cx, parent, spec.name, spec.call, spec.nargs, spec.flags); if (!fun) return false; @@ -672,8 +672,7 @@ InitTypeConstructor(JSContext* cx, // Stash ctypes.{Pointer,Array,Struct}Type.prototype on a reserved slot of // the type constructor, for faster lookup. - if (!JS_SetReservedSlot(cx, obj, SLOT_FN_CTORPROTO, OBJECT_TO_JSVAL(typeProto))) - return false; + js::SetFunctionNativeReserved(obj, SLOT_FN_CTORPROTO, OBJECT_TO_JSVAL(typeProto)); // Create an object to serve as the common ancestor for all CData objects // created from the given type constructor. This has ctypes.CData.prototype @@ -725,14 +724,18 @@ InitInt64Class(JSContext* cx, if (!JS_FreezeObject(cx, ctor)) return NULL; - // Stash ctypes.{Int64,UInt64}.prototype on a reserved slot of the 'join' - // function. - jsval join; - ASSERT_OK(JS_GetProperty(cx, ctor, "join", &join)); - if (!JS_SetReservedSlot(cx, JSVAL_TO_OBJECT(join), SLOT_FN_INT64PROTO, - OBJECT_TO_JSVAL(prototype))) + // Redefine the 'join' function as an extended native and stash + // ctypes.{Int64,UInt64}.prototype in a reserved slot of the new function. + JS_ASSERT(clasp == &sInt64ProtoClass || clasp == &sUInt64ProtoClass); + JSNative native = (clasp == &sInt64ProtoClass) ? Int64::Join : UInt64::Join; + JSFunction* fun = js::DefineFunctionWithReserved(cx, ctor, "join", native, + 2, CTYPESFN_FLAGS); + if (!fun) return NULL; + js::SetFunctionNativeReserved(fun, SLOT_FN_INT64PROTO, + OBJECT_TO_JSVAL(prototype)); + if (!JS_FreezeObject(cx, prototype)) return NULL; @@ -3045,8 +3048,7 @@ CType::GetProtoFromCtor(JSContext* cx, JSObject* obj, CTypeProtoSlot slot) { // Get ctypes.{Pointer,Array,Struct}Type.prototype from a reserved slot // on the type constructor. - jsval protoslot; - ASSERT_OK(JS_GetReservedSlot(cx, obj, SLOT_FN_CTORPROTO, &protoslot)); + jsval protoslot = js::GetFunctionNativeReserved(obj, SLOT_FN_CTORPROTO); JSObject* proto = JSVAL_TO_OBJECT(protoslot); JS_ASSERT(proto); JS_ASSERT(CType::IsCTypeProto(cx, proto)); @@ -6291,8 +6293,7 @@ Int64::Join(JSContext* cx, uintN argc, jsval* vp) // Get Int64.prototype from the function's reserved slot. 
JSObject* callee = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp)); - jsval slot; - ASSERT_OK(JS_GetReservedSlot(cx, callee, SLOT_FN_INT64PROTO, &slot)); + jsval slot = js::GetFunctionNativeReserved(callee, SLOT_FN_INT64PROTO); JSObject* proto = JSVAL_TO_OBJECT(slot); JS_ASSERT(JS_GET_CLASS(cx, proto) == &sInt64ProtoClass); @@ -6459,8 +6460,7 @@ UInt64::Join(JSContext* cx, uintN argc, jsval* vp) // Get UInt64.prototype from the function's reserved slot. JSObject* callee = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp)); - jsval slot; - ASSERT_OK(JS_GetReservedSlot(cx, callee, SLOT_FN_INT64PROTO, &slot)); + jsval slot = js::GetFunctionNativeReserved(callee, SLOT_FN_INT64PROTO); JSObject* proto = JSVAL_TO_OBJECT(slot); JS_ASSERT(JS_GET_CLASS(cx, proto) == &sUInt64ProtoClass); diff --git a/js/src/frontend/BytecodeCompiler.cpp b/js/src/frontend/BytecodeCompiler.cpp index 738165274c41..94a5d8019e99 100644 --- a/js/src/frontend/BytecodeCompiler.cpp +++ b/js/src/frontend/BytecodeCompiler.cpp @@ -93,7 +93,7 @@ DefineGlobals(JSContext *cx, GlobalScope &globalScope, JSScript *script) JSPROP_ENUMERATE | JSPROP_PERMANENT, 0, 0, DNP_SKIP_TYPE); if (!shape) return false; - def.knownSlot = shape->slot; + def.knownSlot = shape->slot(); } Vector worklist(cx); @@ -123,10 +123,10 @@ DefineGlobals(JSContext *cx, GlobalScope &globalScope, JSScript *script) JSObject *obj = arr->vector[i]; if (!obj->isFunction()) continue; - JSFunction *fun = obj->getFunctionPrivate(); + JSFunction *fun = obj->toFunction(); JS_ASSERT(fun->isInterpreted()); JSScript *inner = fun->script(); - if (outer->isHeavyweightFunction) { + if (outer->function() && outer->function()->isHeavyweight()) { outer->isOuterFunction = true; inner->isInnerFunction = true; } diff --git a/js/src/frontend/BytecodeEmitter.cpp b/js/src/frontend/BytecodeEmitter.cpp index 533921e4a307..83894cd5e4a1 100644 --- a/js/src/frontend/BytecodeEmitter.cpp +++ b/js/src/frontend/BytecodeEmitter.cpp @@ -1383,7 +1383,7 @@ frontend::PushBlockScope(TreeContext *tc, StmtInfo *stmt, ObjectBox *blockBox, p PushStatement(tc, stmt, STMT_BLOCK, top); stmt->flags |= SIF_SCOPE; blockBox->parent = tc->blockChainBox; - blockBox->object->setParent(tc->blockChain()); + blockBox->object->setStaticBlockScopeChain(tc->blockChain()); stmt->downScope = tc->topScopeStmt; tc->topScopeStmt = stmt; tc->blockChainBox = blockBox; @@ -1721,7 +1721,7 @@ frontend::LexicalLookup(TreeContext *tc, JSAtom *atom, jsint *slotp, StmtInfo *s if (slotp) { JS_ASSERT(obj->getSlot(JSSLOT_BLOCK_DEPTH).isInt32()); - *slotp = obj->getSlot(JSSLOT_BLOCK_DEPTH).toInt32() + shape->shortid; + *slotp = obj->getSlot(JSSLOT_BLOCK_DEPTH).toInt32() + shape->shortid(); } return stmt; } @@ -1781,8 +1781,8 @@ LookupCompileTimeConstant(JSContext *cx, BytecodeEmitter *bce, JSAtom *atom, Val * from our variable object here. */ if (!shape->writable() && !shape->configurable() && - shape->hasDefaultGetter() && obj->containsSlot(shape->slot)) { - *constp = obj->getSlot(shape->slot); + shape->hasDefaultGetter() && obj->containsSlot(shape->slot())) { + *constp = obj->getSlot(shape->slot()); } } @@ -2020,8 +2020,13 @@ EmitEnterBlock(JSContext *cx, ParseNode *pn, BytecodeEmitter *bce) * js::Bindings::extensibleParents. 
*/ if ((bce->flags & TCF_FUN_EXTENSIBLE_SCOPE) || - bce->bindings.extensibleParents()) - blockObj->setBlockOwnShape(cx); + bce->bindings.extensibleParents()) { + HeapPtrShape shape; + shape.init(blockObj->lastProperty()); + if (!Shape::setExtensibleParents(cx, &shape)) + return false; + blockObj->setLastPropertyInfallible(shape); + } return true; } @@ -2332,7 +2337,6 @@ BindNameToSlot(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn) JS_ASSERT(bce->inFunction()); JS_ASSERT_IF(cookie.slot() != UpvarCookie::CALLEE_SLOT, bce->roLexdeps->lookup(atom)); JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM); - JS_ASSERT(bce->fun()->u.i.skipmin <= skip); /* * If op is a mutating opcode, this upvar's lookup skips too many levels, @@ -3844,13 +3848,6 @@ frontend::EmitFunctionScript(JSContext *cx, BytecodeEmitter *bce, ParseNode *bod bce->switchToMain(); } - if (bce->flags & TCF_FUN_UNBRAND_THIS) { - bce->switchToProlog(); - if (Emit1(cx, bce, JSOP_UNBRANDTHIS) < 0) - return false; - bce->switchToMain(); - } - return EmitTree(cx, bce, body) && Emit1(cx, bce, JSOP_STOP) >= 0 && JSScript::NewScriptFromEmitter(cx, bce); @@ -4756,7 +4753,7 @@ ParseNode::getConstantValue(JSContext *cx, bool strictChecks, Value *vp) case PNK_RC: { JS_ASSERT(isOp(JSOP_NEWINIT) && !(pn_xflags & PNX_NONCONST)); - gc::AllocKind kind = GuessObjectGCKind(pn_count, false); + gc::AllocKind kind = GuessObjectGCKind(pn_count); JSObject *obj = NewBuiltinClassInstance(cx, &ObjectClass, kind); if (!obj) return false; @@ -5447,6 +5444,40 @@ EmitWith(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn) return PopStatementBCE(cx, bce); } +static bool +SetMethodFunction(JSContext *cx, FunctionBox *funbox, JSAtom *atom) +{ + /* + * Replace a boxed function with a new one with a method atom. Methods + * require a function with the extended size finalize kind, which normal + * functions don't have. We don't eagerly allocate functions with the + * expanded size for boxed functions, as most functions are not methods. 
+ */ + JSFunction *fun = js_NewFunction(cx, NULL, NULL, + funbox->function()->nargs, + funbox->function()->flags, + funbox->function()->getParent(), + funbox->function()->atom, + JSFunction::ExtendedFinalizeKind); + if (!fun) + return false; + + JSScript *script = funbox->function()->script(); + if (script) { + fun->setScript(script); + if (!script->typeSetFunction(cx, fun)) + return false; + } + + JS_ASSERT(funbox->function()->joinable()); + fun->setJoinable(); + + fun->setMethodAtom(atom); + + funbox->object = fun; + return true; +} + static bool EmitForIn(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) { @@ -6329,6 +6360,8 @@ frontend::EmitTree(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn) pn2->pn_left->isOp(JSOP_SETPROP) && pn2->pn_right->isOp(JSOP_LAMBDA) && pn2->pn_right->pn_funbox->joinable()) { + if (!SetMethodFunction(cx, pn2->pn_right->pn_funbox, pn2->pn_left->pn_atom)) + return JS_FALSE; pn2->pn_left->setOp(JSOP_SETMETHOD); } if (!EmitTree(cx, bce, pn2)) @@ -7096,7 +7129,7 @@ frontend::EmitTree(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn) */ JSObject *obj = NULL; if (!bce->hasSharps() && bce->compileAndGo()) { - gc::AllocKind kind = GuessObjectGCKind(pn->pn_count, false); + gc::AllocKind kind = GuessObjectGCKind(pn->pn_count); obj = NewBuiltinClassInstance(cx, &ObjectClass, kind); if (!obj) return JS_FALSE; @@ -7144,6 +7177,8 @@ frontend::EmitTree(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn) obj = NULL; op = JSOP_INITMETHOD; pn2->setOp(op); + if (!SetMethodFunction(cx, init->pn_funbox, pn3->pn_atom)) + return JS_FALSE; } else { /* * Disable NEWOBJECT on initializers that set __proto__, which has @@ -7171,11 +7206,6 @@ frontend::EmitTree(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn) } } - if (bce->funbox && bce->funbox->shouldUnbrand(methodInits, slowMethodInits)) { - obj = NULL; - if (Emit1(cx, bce, JSOP_UNBRAND) < 0) - return JS_FALSE; - } if (!EmitEndInit(cx, bce, pn->pn_count)) return JS_FALSE; diff --git a/js/src/frontend/BytecodeEmitter.h b/js/src/frontend/BytecodeEmitter.h index 1170e43105f9..b0a4517b2daa 100644 --- a/js/src/frontend/BytecodeEmitter.h +++ b/js/src/frontend/BytecodeEmitter.h @@ -203,15 +203,6 @@ struct StmtInfo { /* bits 0x40000 and 0x80000 are unused */ -/* - * Flag signifying that the current function seems to be a constructor that - * sets this.foo to define "methods", at least one of which can't be a null - * closure, so we should avoid over-specializing property cache entries and - * trace inlining guards to method function object identity, which will vary - * per instance. - */ -#define TCF_FUN_UNBRAND_THIS 0x100000 - /* * "Module pattern", i.e., a lambda that is immediately applied and the whole * of an expression statement. diff --git a/js/src/frontend/ParseNode.cpp b/js/src/frontend/ParseNode.cpp index 939ccea31bb3..74256ed0792e 100644 --- a/js/src/frontend/ParseNode.cpp +++ b/js/src/frontend/ParseNode.cpp @@ -133,20 +133,6 @@ FunctionBox::scopeIsExtensible() const return tcflags & TCF_FUN_EXTENSIBLE_SCOPE; } -bool -FunctionBox::shouldUnbrand(uintN methods, uintN slowMethods) const -{ - if (slowMethods != 0) { - for (const FunctionBox *funbox = this; funbox; funbox = funbox->parent) { - if (!(funbox->tcflags & TCF_FUN_MODULE_PATTERN)) - return true; - if (funbox->inLoop) - return true; - } - } - return false; -} - /* Add |node| to |parser|'s free node list. 
*/ void ParseNodeAllocator::freeNode(ParseNode *pn) diff --git a/js/src/frontend/ParseNode.h b/js/src/frontend/ParseNode.h index ff0551b0e94f..c10bb749ac96 100644 --- a/js/src/frontend/ParseNode.h +++ b/js/src/frontend/ParseNode.h @@ -1271,18 +1271,6 @@ struct FunctionBox : public ObjectBox * ancestor? */ bool scopeIsExtensible() const; - - /* - * Unbrand an object being initialized or constructed if any method cannot - * be joined to one compiler-created null closure shared among N different - * closure environments. - * - * We despecialize from caching function objects, caching slots or shapes - * instead, because an unbranded object may still have joined methods (for - * which shape->isMethod), since PropertyCache::fill gives precedence to - * joined methods over branded methods. - */ - bool shouldUnbrand(uintN methods, uintN slowMethods) const; }; struct FunctionBoxQueue { diff --git a/js/src/frontend/Parser.cpp b/js/src/frontend/Parser.cpp index 1ec7511db626..4356966131cd 100644 --- a/js/src/frontend/Parser.cpp +++ b/js/src/frontend/Parser.cpp @@ -533,7 +533,7 @@ js::CheckStrictParameters(JSContext *cx, TreeContext *tc) /* Start with lastVariable(), not lastArgument(), for destructuring. */ for (Shape::Range r = tc->bindings.lastVariable(); !r.empty(); r.popFront()) { - jsid id = r.front().propid; + jsid id = r.front().propid(); if (!JSID_IS_ATOM(id)) continue; @@ -932,8 +932,11 @@ Parser::newFunction(TreeContext *tc, JSAtom *atom, FunctionSyntaxKind kind) JSFUN_INTERPRETED | (kind == Expression ? JSFUN_LAMBDA : 0), parent, atom); if (fun && !tc->compileAndGo()) { - fun->clearParent(); - fun->clearType(); + if (!fun->clearParent(context)) + return NULL; + if (!fun->clearType(context)) + return NULL; + fun->setEnvironment(NULL); } return fun; } @@ -1169,7 +1172,7 @@ LeaveFunction(ParseNode *fn, TreeContext *funtc, PropertyName *funName = NULL, * we create it eagerly whenever parameters are (or might, in the case of * calls to eval) be assigned. */ - if (funtc->inStrictMode() && funbox->object->getFunctionPrivate()->nargs > 0) { + if (funtc->inStrictMode() && funbox->object->toFunction()->nargs > 0) { AtomDeclsIter iter(&funtc->decls); Definition *dn; @@ -1951,7 +1954,7 @@ BindLet(JSContext *cx, BindData *data, JSAtom *atom, TreeContext *tc) * BytecodeEmitter.cpp:EmitEnterBlock so they don't tie up unused space * in the so-called "static" prototype Block. */ - blockObj->setSlot(shape->slot, PrivateValue(pn)); + blockObj->setSlot(shape->slot(), PrivateValue(pn)); return true; } @@ -1965,7 +1968,7 @@ PopStatement(TreeContext *tc) JS_ASSERT(!obj->isClonedBlock()); for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) { - JSAtom *atom = JSID_TO_ATOM(r.front().propid); + JSAtom *atom = JSID_TO_ATOM(r.front().propid()); /* Beware the empty destructuring dummy. */ if (atom == tc->parser->context->runtime->atomState.emptyAtom) @@ -1973,14 +1976,7 @@ PopStatement(TreeContext *tc) tc->decls.remove(atom); } - /* - * js_CloneBlockObject requires obj's shape to be frozen. Compare - * Bindings::makeImmutable. - * - * (This is a second pass over the shapes, if obj has a dictionary, but - * that is rare.) 
- */ - obj->lastProp->freezeIfDictionary(); + JS_ASSERT(!obj->inDictionaryMode()); } PopStatementTC(tc); } @@ -2051,7 +2047,7 @@ DefineGlobal(ParseNode *pn, BytecodeEmitter *bce, PropertyName *name) return true; } - def = GlobalScope::GlobalDef(shape->slot); + def = GlobalScope::GlobalDef(shape->slot()); } else { def = GlobalScope::GlobalDef(name, funbox); } @@ -3687,7 +3683,7 @@ Parser::letStatement() stmt->downScope = tc->topScopeStmt; tc->topScopeStmt = stmt; - obj->setParent(tc->blockChain()); + obj->setStaticBlockScopeChain(tc->blockChain()); blockbox->parent = tc->blockChainBox; tc->blockChainBox = blockbox; stmt->blockBox = blockbox; @@ -7156,8 +7152,10 @@ Parser::primaryExpr(TokenKind tt, JSBool afterDot) return NULL; if (!tc->compileAndGo()) { - reobj->clearParent(); - reobj->clearType(); + if (!reobj->clearParent(context)) + return NULL; + if (!reobj->clearType(context)) + return NULL; } pn->pn_objbox = tc->parser->newObjectBox(reobj); diff --git a/js/src/frontend/SemanticAnalysis.cpp b/js/src/frontend/SemanticAnalysis.cpp index 25437bcfa12c..68ab8ea54d10 100644 --- a/js/src/frontend/SemanticAnalysis.cpp +++ b/js/src/frontend/SemanticAnalysis.cpp @@ -164,7 +164,6 @@ FindFunArgs(FunctionBox *funbox, int level, FunctionBoxQueue *queue) do { ParseNode *fn = funbox->node; JS_ASSERT(fn->isArity(PN_FUNC)); - JSFunction *fun = funbox->function(); int fnlevel = level; /* @@ -245,12 +244,10 @@ FindFunArgs(FunctionBox *funbox, int level, FunctionBoxQueue *queue) /* * Finally, after we've traversed all of the current function's kids, - * minimize fun's skipmin against our accumulated skipmin. Do likewise - * with allskipmin, but minimize across funbox and all of its siblings, - * to compute our return value. + * minimize allskipmin against our accumulated skipmin. Minimize across + * funbox and all of its siblings, to compute our return value. */ if (skipmin != UpvarCookie::FREE_LEVEL) { - fun->u.i.skipmin = skipmin; if (skipmin < allskipmin) allskipmin = skipmin; } @@ -512,46 +509,6 @@ FlagHeavyweights(Definition *dn, FunctionBox *funbox, uint32 *tcflags) *tcflags |= TCF_FUN_HEAVYWEIGHT; } -static void -ConsiderUnbranding(FunctionBox *funbox) -{ - /* - * We've already recursively set our kids' kinds, which also classifies - * enclosing functions holding upvars referenced in those descendants' - * bodies. So now we can check our "methods". - * - * Despecialize from branded method-identity-based shape to shape- or - * slot-based shape if this function smells like a constructor and too many - * of its methods are *not* joinable null closures (i.e., they have one or - * more upvars fetched via the display). 
- */ - bool returnsExpr = !!(funbox->tcflags & TCF_RETURN_EXPR); -#if JS_HAS_EXPR_CLOSURES - { - ParseNode *pn2 = funbox->node->pn_body; - if (pn2->isKind(PNK_UPVARS)) - pn2 = pn2->pn_tree; - if (pn2->isKind(PNK_ARGSBODY)) - pn2 = pn2->last(); - if (!pn2->isKind(PNK_STATEMENTLIST)) - returnsExpr = true; - } -#endif - if (!returnsExpr) { - uintN methodSets = 0, slowMethodSets = 0; - - for (ParseNode *method = funbox->methods; method; method = method->pn_link) { - JS_ASSERT(method->isOp(JSOP_LAMBDA) || method->isOp(JSOP_LAMBDA_FC)); - ++methodSets; - if (!method->pn_funbox->joinable()) - ++slowMethodSets; - } - - if (funbox->shouldUnbrand(methodSets, slowMethodSets)) - funbox->tcflags |= TCF_FUN_UNBRAND_THIS; - } -} - static void SetFunctionKinds(FunctionBox *funbox, uint32 *tcflags, bool isDirectEval) { @@ -559,10 +516,8 @@ SetFunctionKinds(FunctionBox *funbox, uint32 *tcflags, bool isDirectEval) ParseNode *fn = funbox->node; ParseNode *pn = fn->pn_body; - if (funbox->kids) { + if (funbox->kids) SetFunctionKinds(funbox->kids, tcflags, isDirectEval); - ConsiderUnbranding(funbox); - } JSFunction *fun = funbox->function(); @@ -674,8 +629,8 @@ SetFunctionKinds(FunctionBox *funbox, uint32 *tcflags, bool isDirectEval) * must have their OWN_SHAPE flags set; the comments for * js::Bindings::extensibleParents explain why. */ -static void -MarkExtensibleScopeDescendants(FunctionBox *funbox, bool hasExtensibleParent) +static bool +MarkExtensibleScopeDescendants(JSContext *context, FunctionBox *funbox, bool hasExtensibleParent) { for (; funbox; funbox = funbox->siblings) { /* @@ -685,14 +640,20 @@ MarkExtensibleScopeDescendants(FunctionBox *funbox, bool hasExtensibleParent) */ JS_ASSERT(!funbox->bindings.extensibleParents()); - if (hasExtensibleParent) - funbox->bindings.setExtensibleParents(); + if (hasExtensibleParent) { + if (!funbox->bindings.setExtensibleParents(context)) + return false; + } if (funbox->kids) { - MarkExtensibleScopeDescendants(funbox->kids, - hasExtensibleParent || funbox->scopeIsExtensible()); + if (!MarkExtensibleScopeDescendants(context, funbox->kids, + hasExtensibleParent || funbox->scopeIsExtensible())) { + return false; + } } } + + return true; } bool @@ -703,7 +664,8 @@ frontend::AnalyzeFunctions(TreeContext *tc) return true; if (!MarkFunArgs(tc->parser->context, tc->functionList, tc->parser->functionCount)) return false; - MarkExtensibleScopeDescendants(tc->functionList, false); + if (!MarkExtensibleScopeDescendants(tc->parser->context, tc->functionList, false)) + return false; bool isDirectEval = !!tc->parser->callerFrame; SetFunctionKinds(tc->functionList, &tc->flags, isDirectEval); return true; diff --git a/js/src/gc/Barrier.h b/js/src/gc/Barrier.h index dfe508564228..461b4e14fbf1 100644 --- a/js/src/gc/Barrier.h +++ b/js/src/gc/Barrier.h @@ -271,7 +271,8 @@ typedef HeapPtr HeapPtrFunction; typedef HeapPtr HeapPtrString; typedef HeapPtr HeapPtrScript; typedef HeapPtr HeapPtrShape; -typedef HeapPtr HeapPtrConstShape; +typedef HeapPtr HeapPtrBaseShape; +typedef HeapPtr HeapPtrTypeObject; typedef HeapPtr HeapPtrXML; /* Useful for hashtables with a HeapPtr as key. 
*/ diff --git a/js/src/gc/Statistics.cpp b/js/src/gc/Statistics.cpp index 7c3a998bd2a5..f98be20904c3 100644 --- a/js/src/gc/Statistics.cpp +++ b/js/src/gc/Statistics.cpp @@ -192,7 +192,6 @@ Statistics::beginGC(JSCompartment *comp, Reason reason) Probes::GCStart(compartment); GCCrashData crashData; - crashData.isRegen = runtime->shapeGen & SHAPE_OVERFLOW_BIT; crashData.isCompartment = !!compartment; crash::SaveCrashData(crash::JS_CRASH_TAG_GC, &crashData, sizeof(crashData)); } @@ -277,8 +276,7 @@ Statistics::endGC() if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback) { (*cb)(JS_TELEMETRY_GC_REASON, triggerReason); (*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, compartment ? 1 : 0); - (*cb)(JS_TELEMETRY_GC_IS_SHAPE_REGEN, - runtime->shapeGen & SHAPE_OVERFLOW_BIT ? 1 : 0); + (*cb)(JS_TELEMETRY_GC_IS_SHAPE_REGEN, 0); (*cb)(JS_TELEMETRY_GC_MS, t(PHASE_GC)); (*cb)(JS_TELEMETRY_GC_MARK_MS, t(PHASE_MARK)); (*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(PHASE_SWEEP)); diff --git a/js/src/jit-test/tests/basic/bug690732.js b/js/src/jit-test/tests/basic/bug690732.js new file mode 100644 index 000000000000..bfcb728914bb --- /dev/null +++ b/js/src/jit-test/tests/basic/bug690732.js @@ -0,0 +1,4 @@ + +var o4 = Object.freeze({ + set: function(summary) {} +}); diff --git a/js/src/jit-test/tests/basic/bug699166.js b/js/src/jit-test/tests/basic/bug699166.js new file mode 100644 index 000000000000..ac4c89eb0325 --- /dev/null +++ b/js/src/jit-test/tests/basic/bug699166.js @@ -0,0 +1,7 @@ +a = "".__proto__ +b = uneval().__proto__ +for (var i = 0; i < 2; i++) { + a.__defineSetter__("valueOf", function() {}) + a + "" + delete b.valueOf +} diff --git a/js/src/jit-test/tests/basic/bug700300.js b/js/src/jit-test/tests/basic/bug700300.js new file mode 100644 index 000000000000..ccee3ecfd211 --- /dev/null +++ b/js/src/jit-test/tests/basic/bug700300.js @@ -0,0 +1,4 @@ +for (let j = 0; j < (20); ++(__lookupSetter__)) { + function g() { j; } + j++; +} diff --git a/js/src/jit-test/tests/basic/bug700501.js b/js/src/jit-test/tests/basic/bug700501.js new file mode 100644 index 000000000000..65a130987e1d --- /dev/null +++ b/js/src/jit-test/tests/basic/bug700501.js @@ -0,0 +1,13 @@ +Function.prototype.__proto__["p"] = 3 +c = [].__proto__ +c[5] = 3 +Namespace.prototype.__proto__[4] = function() {} +gc() +Function("\ + {\ + function f(d) {}\ + for each(let z in[0]) {\ + f(z)\ + }\ + }\ +")() diff --git a/js/src/jit-test/tests/basic/bug700799.js b/js/src/jit-test/tests/basic/bug700799.js new file mode 100644 index 000000000000..7a543325fa80 --- /dev/null +++ b/js/src/jit-test/tests/basic/bug700799.js @@ -0,0 +1 @@ +for (let x in [.(let(x) function() {})]) {} diff --git a/js/src/jit-test/tests/basic/bug703818.js b/js/src/jit-test/tests/basic/bug703818.js new file mode 100644 index 000000000000..be8fdaf5a1fe --- /dev/null +++ b/js/src/jit-test/tests/basic/bug703818.js @@ -0,0 +1,3 @@ +Object.defineProperty(Namespace.prototype, "toString", { + enumerable: true +}) diff --git a/js/src/jit-test/tests/basic/bug704134.js b/js/src/jit-test/tests/basic/bug704134.js new file mode 100644 index 000000000000..947a0b13bbb3 --- /dev/null +++ b/js/src/jit-test/tests/basic/bug704134.js @@ -0,0 +1,12 @@ +function f(s) { + eval(s); + return function() { + with({}) {}; + return b; + }; +} +var b = 1; +var g1 = f(""); +var g2 = f("var b = 2;"); +g1(''); +assertEq(g2(''), 2); diff --git a/js/src/jit-test/tests/basic/bug705895-1.js b/js/src/jit-test/tests/basic/bug705895-1.js new file mode 100644 index 000000000000..a22a217cbebc --- /dev/null +++ 
b/js/src/jit-test/tests/basic/bug705895-1.js @@ -0,0 +1,11 @@ +c = (0).__proto__ +function f(o) { + o.__proto__ = null + for (x in o) {} +} +for (i = 0; i < 9; i++) { + f(c) + Function.prototype.__proto__.__proto__ = c + for (x in Function.prototype.__proto__) {} + f(Math.__proto__) +} diff --git a/js/src/jit-test/tests/basic/bug705895-2.js b/js/src/jit-test/tests/basic/bug705895-2.js new file mode 100644 index 000000000000..445efb56a82c --- /dev/null +++ b/js/src/jit-test/tests/basic/bug705895-2.js @@ -0,0 +1,13 @@ +// |jit-test| error: TypeError +function f(o) { + for (j = 0; j < 9; j++) { + if (j) { + o.__proto__ = null + } + for (v in o) {} + } +} +for (i = 0; i < 9; i++) { + (new Boolean).__proto__.__defineGetter__("toString", function() {}) + f(Boolean.prototype) +} diff --git a/js/src/jit-test/tests/debug/onExceptionUnwind-resumption-01.js b/js/src/jit-test/tests/debug/onExceptionUnwind-resumption-01.js new file mode 100644 index 000000000000..b34d0d872acb --- /dev/null +++ b/js/src/jit-test/tests/debug/onExceptionUnwind-resumption-01.js @@ -0,0 +1,9 @@ +// Check that an onExceptionUnwind hook can force a frame to return a value early. + +var g = newGlobal('new-compartment'); +var dbg = Debugger(g); +dbg.onExceptionUnwind = function (frame, exc) { + return { return:"sproon" }; +}; +g.eval("function f() { throw 'ksnife'; }"); +assertEq(g.f(), "sproon"); diff --git a/js/src/jit-test/tests/debug/onExceptionUnwind-resumption-02.js b/js/src/jit-test/tests/debug/onExceptionUnwind-resumption-02.js new file mode 100644 index 000000000000..8f93336472b1 --- /dev/null +++ b/js/src/jit-test/tests/debug/onExceptionUnwind-resumption-02.js @@ -0,0 +1,10 @@ +// Check that if an onExceptionUnwind hook forces a constructor frame to +// return a primitive value, it still gets wrapped up in an object. + +var g = newGlobal('new-compartment'); +var dbg = Debugger(g); +dbg.onExceptionUnwind = function (frame, exc) { + return { return:"sproon" }; +}; +g.eval("function f() { throw 'ksnife'; }"); +assertEq(typeof new g.f, "object"); diff --git a/js/src/jit-test/tests/debug/onExceptionUnwind-resumption-03.js b/js/src/jit-test/tests/debug/onExceptionUnwind-resumption-03.js new file mode 100644 index 000000000000..37053a655378 --- /dev/null +++ b/js/src/jit-test/tests/debug/onExceptionUnwind-resumption-03.js @@ -0,0 +1,11 @@ +// Check that an onExceptionUnwind hook can force a frame to throw a different exception. + +load(libdir + "asserts.js"); + +var g = newGlobal('new-compartment'); +var dbg = Debugger(g); +dbg.onExceptionUnwind = function (frame, exc) { + return { throw:"sproon" }; +}; +g.eval("function f() { throw 'ksnife'; }"); +assertThrowsValue(g.f, "sproon"); diff --git a/js/src/jit-test/tests/debug/onExceptionUnwind-resumption-04.js b/js/src/jit-test/tests/debug/onExceptionUnwind-resumption-04.js new file mode 100644 index 000000000000..c22216428113 --- /dev/null +++ b/js/src/jit-test/tests/debug/onExceptionUnwind-resumption-04.js @@ -0,0 +1,17 @@ +// Check that an onExceptionUnwind hook can force a frame to terminate. 
+ +var g = newGlobal('new-compartment'); +var dbg = Debugger(g); +g.eval("function f() { throw 'ksnife'; }"); +var log = ''; +dbg.onDebuggerStatement = function (frame) { + log += 'd1'; + assertEq(frame.eval("f();"), null); + log += 'd2'; +}; +dbg.onExceptionUnwind = function (frame, exc) { + log += 'u'; + return null; +}; +g.eval("debugger;"); +assertEq(log, "d1ud2"); diff --git a/js/src/jit-test/tests/jaeger/bug563000/trap-from-add-ool.js b/js/src/jit-test/tests/jaeger/bug563000/trap-from-add-ool.js index 96c6f174fe6c..3b2417649720 100644 --- a/js/src/jit-test/tests/jaeger/bug563000/trap-from-add-ool.js +++ b/js/src/jit-test/tests/jaeger/bug563000/trap-from-add-ool.js @@ -3,7 +3,7 @@ setDebug(true); x = "notset"; function main() { /* The JSOP_STOP in main. */ - a = { valueOf: function () { trap(main, 58, "success()"); } }; + a = { valueOf: function () { trap(main, 57, "success()"); } }; b = ""; eval(); a + b; diff --git a/js/src/jit-test/tests/jaeger/bug704138.js b/js/src/jit-test/tests/jaeger/bug704138.js new file mode 100644 index 000000000000..a724350c467e --- /dev/null +++ b/js/src/jit-test/tests/jaeger/bug704138.js @@ -0,0 +1,17 @@ +function TestCase(n, d, e, a) + this.name=n; +function reportCompare (expected, actual, description) { + new TestCase +} +reportCompare(true, "isGenerator" in Function, "Function.prototype.isGenerator present"); +var p = Proxy.create({ + has : function(id) {} +}); +function test() { + Object.prototype.__proto__=null + if (new TestCase) + Object.prototype.__proto__=p +} +test(); +new TestCase; +test() diff --git a/js/src/jit-test/tests/jaeger/bug705873.js b/js/src/jit-test/tests/jaeger/bug705873.js new file mode 100644 index 000000000000..1d4d65246a9d --- /dev/null +++ b/js/src/jit-test/tests/jaeger/bug705873.js @@ -0,0 +1,7 @@ +a = [] +function f(o) { + o[5] = {} +} +for (var i = 0; i < 20; i++) { + with(a) f(a) +} diff --git a/js/src/jsanalyze.cpp b/js/src/jsanalyze.cpp index 5977692de0e0..c2105a142f5e 100644 --- a/js/src/jsanalyze.cpp +++ b/js/src/jsanalyze.cpp @@ -174,7 +174,7 @@ ScriptAnalysis::analyzeBytecode(JSContext *cx) LifoAlloc &tla = cx->typeLifoAlloc(); unsigned length = script->length; - unsigned nargs = script->hasFunction ? script->function()->nargs : 0; + unsigned nargs = script->function() ? script->function()->nargs : 0; numSlots = TotalSlots(script); @@ -226,15 +226,16 @@ ScriptAnalysis::analyzeBytecode(JSContext *cx) if (cx->compartment->debugMode()) usesReturnValue_ = true; + bool heavyweight = script->function() && script->function()->isHeavyweight(); + isInlineable = true; - if (script->nClosedArgs || script->nClosedVars || - (script->hasFunction && script->function()->isHeavyweight()) || + if (script->nClosedArgs || script->nClosedVars || heavyweight || script->usesEval || script->usesArguments || cx->compartment->debugMode()) { isInlineable = false; } modifiesArguments_ = false; - if (script->nClosedArgs || (script->hasFunction && script->function()->isHeavyweight())) + if (script->nClosedArgs || heavyweight) modifiesArguments_ = true; canTrackVars = true; diff --git a/js/src/jsanalyze.h b/js/src/jsanalyze.h index 7b12c5b4c707..a5ac0e7a7723 100644 --- a/js/src/jsanalyze.h +++ b/js/src/jsanalyze.h @@ -394,7 +394,7 @@ static inline uint32 ArgSlot(uint32 arg) { return 2 + arg; } static inline uint32 LocalSlot(JSScript *script, uint32 local) { - return 2 + (script->hasFunction ? script->function()->nargs : 0) + local; + return 2 + (script->function() ? 
script->function()->nargs : 0) + local; } static inline uint32 TotalSlots(JSScript *script) { return LocalSlot(script, 0) + script->nfixed; diff --git a/js/src/jsapi-tests/testArgumentsObject.cpp b/js/src/jsapi-tests/testArgumentsObject.cpp index 359852f50a2c..83587c56fbd5 100644 --- a/js/src/jsapi-tests/testArgumentsObject.cpp +++ b/js/src/jsapi-tests/testArgumentsObject.cpp @@ -6,8 +6,6 @@ #include "vm/Stack-inl.h" -#include "jsobjinlines.h" - using namespace js; static const char NORMAL_ZERO[] = diff --git a/js/src/jsapi-tests/testBug604087.cpp b/js/src/jsapi-tests/testBug604087.cpp index 63a43c2978ca..27b7e0b54aa8 100644 --- a/js/src/jsapi-tests/testBug604087.cpp +++ b/js/src/jsapi-tests/testBug604087.cpp @@ -8,8 +8,6 @@ #include "jsobj.h" #include "jswrapper.h" -#include "jsobjinlines.h" - struct OuterWrapper : js::Wrapper { OuterWrapper() : Wrapper(0) {} diff --git a/js/src/jsapi-tests/testConservativeGC.cpp b/js/src/jsapi-tests/testConservativeGC.cpp index 45eb898e8885..163189bce041 100644 --- a/js/src/jsapi-tests/testConservativeGC.cpp +++ b/js/src/jsapi-tests/testConservativeGC.cpp @@ -50,13 +50,8 @@ BEGIN_TEST(testConservativeGC) bool checkObjectFields(JSObject *savedCopy, JSObject *obj) { /* Ignore fields which are unstable across GCs. */ - CHECK(savedCopy->lastProp == obj->lastProp); - CHECK(savedCopy->getClass() == obj->getClass()); - CHECK(savedCopy->flags == obj->flags); - CHECK(savedCopy->newType == obj->newType); + CHECK(savedCopy->lastProperty() == obj->lastProperty()); CHECK(savedCopy->getProto() == obj->getProto()); - CHECK(savedCopy->parent == obj->parent); - CHECK(savedCopy->privateData == obj->privateData); return true; } diff --git a/js/src/jsapi-tests/testFuncCallback.cpp b/js/src/jsapi-tests/testFuncCallback.cpp index 1f7d885006c7..a78b07e86dac 100644 --- a/js/src/jsapi-tests/testFuncCallback.cpp +++ b/js/src/jsapi-tests/testFuncCallback.cpp @@ -20,8 +20,7 @@ funcTransition(const JSFunction *, if (entering > 0) { ++depth; ++enters; - if (! JS_ON_TRACE(cx)) - ++interpreted; + ++interpreted; } else { --depth; ++leaves; @@ -93,12 +92,13 @@ BEGIN_TEST(testFuncCallback_bug507012) CHECK_EQUAL(depth, 0); interpreted = enters = leaves = depth = 0; - // Check calls invoked while running on trace + // Check calls invoked while running on trace -- or now, perhaps on + // IonMonkey's equivalent, if it ever starts to exist? EXEC("function g () { ++x; }"); interpreted = enters = leaves = depth = 0; - EXEC("for (i = 0; i < 50; ++i) { g(); }"); - CHECK_EQUAL(enters, 1+50); - CHECK_EQUAL(leaves, 1+50); + EXEC("for (i = 0; i < 5000; ++i) { g(); }"); + CHECK_EQUAL(enters, 1+5000); + CHECK_EQUAL(leaves, 1+5000); CHECK_EQUAL(depth, 0); // Test nesting callbacks via JS_GetFunctionCallback() diff --git a/js/src/jsapi-tests/testIndexToString.cpp b/js/src/jsapi-tests/testIndexToString.cpp index f240b601ac72..c93410f59554 100644 --- a/js/src/jsapi-tests/testIndexToString.cpp +++ b/js/src/jsapi-tests/testIndexToString.cpp @@ -9,8 +9,6 @@ #include "jsnum.h" #include "jsstr.h" -#include "jsobjinlines.h" - #include "vm/String-inl.h" using namespace mozilla; diff --git a/js/src/jsapi-tests/testLookup.cpp b/js/src/jsapi-tests/testLookup.cpp index 2d1c4e81a46d..121a89f721a7 100644 --- a/js/src/jsapi-tests/testLookup.cpp +++ b/js/src/jsapi-tests/testLookup.cpp @@ -5,6 +5,8 @@ #include "tests.h" #include "jsfun.h" // for js::IsInternalFunctionObject +#include "jsobjinlines.h" + BEGIN_TEST(testLookup_bug522590) { // Define a function that makes method-bearing objects. 
@@ -26,7 +28,7 @@ BEGIN_TEST(testLookup_bug522590) JSObject *funobj = JSVAL_TO_OBJECT(r); CHECK(funobj->isFunction()); CHECK(!js::IsInternalFunctionObject(funobj)); - CHECK(funobj->getFunctionPrivate() != (JSFunction *) funobj); + CHECK(funobj->toFunction()->isClonedMethod()); return true; } diff --git a/js/src/jsapi-tests/testVersion.cpp b/js/src/jsapi-tests/testVersion.cpp index 855c4222af86..4d6d9567527f 100644 --- a/js/src/jsapi-tests/testVersion.cpp +++ b/js/src/jsapi-tests/testVersion.cpp @@ -3,6 +3,7 @@ #include "jscntxt.h" #include "jscntxtinlines.h" +#include "jsobjinlines.h" using namespace js; diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp index 33fd858d5bd2..4950f45f147e 100644 --- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -336,11 +336,11 @@ JS_ConvertArgumentsVA(JSContext *cx, uintN argc, jsval *argv, const char *format *va_arg(ap, JSObject **) = obj; break; case 'f': - obj = js_ValueToFunctionObject(cx, sp, 0); + obj = js_ValueToFunction(cx, sp, 0); if (!obj) return JS_FALSE; *sp = OBJECT_TO_JSVAL(obj); - *va_arg(ap, JSFunction **) = obj->getFunctionPrivate(); + *va_arg(ap, JSFunction **) = obj->toFunction(); break; case 'v': *va_arg(ap, jsval *) = *sp; @@ -429,7 +429,7 @@ JS_ConvertValue(JSContext *cx, jsval v, JSType type, jsval *vp) break; case JSTYPE_FUNCTION: *vp = v; - obj = js_ValueToFunctionObject(cx, vp, JSV2F_SEARCH_STACK); + obj = js_ValueToFunction(cx, vp, JSV2F_SEARCH_STACK); ok = (obj != NULL); break; case JSTYPE_STRING: @@ -645,7 +645,6 @@ JSRuntime::JSRuntime() compartmentCallback(NULL), activityCallback(NULL), activityCallbackArg(NULL), - protoHazardShape(0), gcSystemAvailableChunkListHead(NULL), gcUserAvailableChunkListHead(NULL), gcKeepAtoms(0), @@ -672,7 +671,6 @@ JSRuntime::JSRuntime() gcPoke(false), gcMarkAndSweep(false), gcRunning(false), - gcRegenShapes(false), #ifdef JS_GC_ZEAL gcZeal_(0), gcZealFrequency(0), @@ -726,7 +724,6 @@ JSRuntime::JSRuntime() threadData(thisFromCtor()), #endif trustedPrincipals_(NULL), - shapeGen(0), wrapObjectCallback(NULL), preWrapObjectCallback(NULL), inOOMReport(0) @@ -2383,6 +2380,10 @@ JS_PrintTraceThingInfo(char *buf, size_t bufsize, JSTracer *trc, void *thing, name = "shape"; break; + case JSTRACE_BASE_SHAPE: + name = "base_shape"; + break; + case JSTRACE_TYPE_OBJECT: name = "type_object"; break; @@ -2411,7 +2412,7 @@ JS_PrintTraceThingInfo(char *buf, size_t bufsize, JSTracer *trc, void *thing, JSObject *obj = (JSObject *)thing; Class *clasp = obj->getClass(); if (clasp == &FunctionClass) { - JSFunction *fun = obj->getFunctionPrivate(); + JSFunction *fun = obj->toFunction(); if (!fun) { JS_snprintf(buf, bufsize, ""); } else if (fun != obj) { @@ -2446,6 +2447,7 @@ JS_PrintTraceThingInfo(char *buf, size_t bufsize, JSTracer *trc, void *thing, } case JSTRACE_SHAPE: + case JSTRACE_BASE_SHAPE: case JSTRACE_TYPE_OBJECT: break; @@ -3080,14 +3082,9 @@ JS_GetInstancePrivate(JSContext *cx, JSObject *obj, JSClass *clasp, jsval *argv) JS_PUBLIC_API(JSObject *) JS_GetPrototype(JSContext *cx, JSObject *obj) { - JSObject *proto; - CHECK_REQUEST(cx); assertSameCompartment(cx, obj); - proto = obj->getProto(); - - /* Beware ref to dead object (we may be called from obj's finalizer). */ - return proto && !proto->isNewborn() ? 
proto : NULL; + return obj->getProto(); } JS_PUBLIC_API(JSBool) @@ -3101,21 +3098,19 @@ JS_SetPrototype(JSContext *cx, JSObject *obj, JSObject *proto) JS_PUBLIC_API(JSObject *) JS_GetParent(JSContext *cx, JSObject *obj) { + JS_ASSERT(!obj->isInternalScope()); assertSameCompartment(cx, obj); - JSObject *parent = obj->getParent(); - - /* Beware ref to dead object (we may be called from obj's finalizer). */ - return parent && !parent->isNewborn() ? parent : NULL; + return obj->getParent(); } JS_PUBLIC_API(JSBool) JS_SetParent(JSContext *cx, JSObject *obj, JSObject *parent) { CHECK_REQUEST(cx); + JS_ASSERT(!obj->isInternalScope()); JS_ASSERT(parent || !obj->getParent()); assertSameCompartment(cx, obj, parent); - obj->setParent(parent); - return true; + return obj->setParent(cx, parent); } JS_PUBLIC_API(JSObject *) @@ -3131,8 +3126,7 @@ JS_GetConstructor(JSContext *cx, JSObject *proto) if (!proto->getProperty(cx, cx->runtime->atomState.constructorAtom, &cval)) return NULL; } - JSObject *funobj; - if (!IsFunctionObject(cval, &funobj)) { + if (!IsFunctionObject(cval)) { JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NO_CONSTRUCTOR, proto->getClass()->name); return NULL; @@ -3206,14 +3200,13 @@ JS_NewObject(JSContext *cx, JSClass *jsclasp, JSObject *proto, JSObject *parent) JS_ASSERT(clasp != &FunctionClass); JS_ASSERT(!(clasp->flags & JSCLASS_IS_GLOBAL)); - if (proto) - proto->getNewType(cx, NULL, /* markUnknown = */ true); + if (proto && !proto->setNewTypeUnknown(cx)) + return NULL; - JSObject *obj = NewNonFunction(cx, clasp, proto, parent); + JSObject *obj = NewObjectWithClassProto(cx, clasp, proto, parent); if (obj) { if (clasp->ext.equality) MarkTypeObjectFlags(cx, obj, OBJECT_FLAG_SPECIAL_EQUALITY); - obj->syncSpecialEquality(); MarkTypeObjectUnknownProperties(cx, obj->type()); } @@ -3235,11 +3228,9 @@ JS_NewObjectWithGivenProto(JSContext *cx, JSClass *jsclasp, JSObject *proto, JSO JS_ASSERT(clasp != &FunctionClass); JS_ASSERT(!(clasp->flags & JSCLASS_IS_GLOBAL)); - JSObject *obj = NewNonFunction(cx, clasp, proto, parent); - if (obj) { - obj->syncSpecialEquality(); + JSObject *obj = NewObjectWithGivenProto(cx, clasp, proto, parent); + if (obj) MarkTypeObjectUnknownProperties(cx, obj->type()); - } return obj; } @@ -3349,13 +3340,13 @@ LookupResult(JSContext *cx, JSObject *obj, JSObject *obj2, jsid id, Shape *shape = (Shape *) prop; if (shape->isMethod()) { - vp->setObject(shape->methodObject()); + vp->setObject(*obj2->nativeGetMethod(shape)); return !!obj2->methodReadBarrier(cx, *shape, vp); } /* Peek at the native property's slot value, without doing a Get. 
*/ - if (obj2->containsSlot(shape->slot)) { - *vp = obj2->nativeGetSlot(shape->slot); + if (shape->hasSlot()) { + *vp = obj2->nativeGetSlot(shape->slot()); return true; } } else { @@ -3654,12 +3645,10 @@ JS_DefineObject(JSContext *cx, JSObject *obj, const char *name, JSClass *jsclasp if (!clasp) clasp = &ObjectClass; /* default class is Object */ - JSObject *nobj = NewObject(cx, clasp, proto, obj); + JSObject *nobj = NewObjectWithClassProto(cx, clasp, proto, obj); if (!nobj) return NULL; - nobj->syncSpecialEquality(); - if (!DefineProperty(cx, obj, name, ObjectValue(*nobj), NULL, NULL, attrs, 0, 0)) return NULL; @@ -3726,12 +3715,12 @@ GetPropertyDescriptorById(JSContext *cx, JSObject *obj, jsid id, uintN flags, if (shape->isMethod()) { desc->getter = JS_PropertyStub; desc->setter = JS_StrictPropertyStub; - desc->value.setObject(shape->methodObject()); + desc->value.setObject(*obj2->nativeGetMethod(shape)); } else { desc->getter = shape->getter(); desc->setter = shape->setter(); - if (obj2->containsSlot(shape->slot)) - desc->value = obj2->nativeGetSlot(shape->slot); + if (shape->hasSlot()) + desc->value = obj2->nativeGetSlot(shape->slot()); else desc->value.setUndefined(); } @@ -4143,7 +4132,7 @@ JS_NewPropertyIterator(JSContext *cx, JSObject *obj) CHECK_REQUEST(cx); assertSameCompartment(cx, obj); - iterobj = NewNonFunction(cx, &prop_iter_class, NULL, obj); + iterobj = NewObjectWithClassProto(cx, &prop_iter_class, NULL, obj); if (!iterobj) return NULL; @@ -4191,11 +4180,11 @@ JS_NextProperty(JSContext *cx, JSObject *iterobj, jsid *idp) shape = shape->previous(); if (!shape->previous()) { - JS_ASSERT(JSID_IS_EMPTY(shape->propid)); + JS_ASSERT(shape->isEmptyShape()); *idp = JSID_VOID; } else { iterobj->setPrivate(const_cast(shape->previous())); - *idp = shape->propid; + *idp = shape->propid(); } } else { /* Non-native case: use the ida enumerated when iterobj was created. */ @@ -4382,9 +4371,9 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent) return NULL; } - JSFunction *fun = funobj->getFunctionPrivate(); + JSFunction *fun = funobj->toFunction(); if (!fun->isInterpreted()) - return CloneFunctionObject(cx, fun, parent); + return CloneFunctionObject(cx, fun, parent, fun->getAllocKind()); if (fun->script()->compileAndGo) { JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, @@ -4393,7 +4382,7 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent) } if (!fun->isFlatClosure()) - return CloneFunctionObject(cx, fun, parent); + return CloneFunctionObject(cx, fun, parent, fun->getAllocKind()); /* * A flat closure carries its own environment, so why clone it? 
In case @@ -4424,13 +4413,13 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent) JSMSG_BAD_CLONE_FUNOBJ_SCOPE); return NULL; } - obj = obj->getParent(); + obj = obj->scopeChain(); } Value v; - if (!obj->getGeneric(cx, r.front().propid, &v)) + if (!obj->getGeneric(cx, r.front().propid(), &v)) return NULL; - clone->setFlatClosureUpvar(i, v); + clone->toFunction()->setFlatClosureUpvar(i, v); } return clone; @@ -4477,14 +4466,15 @@ JS_IsNativeFunction(JSObject *funobj, JSNative call) { if (!funobj->isFunction()) return false; - JSFunction *fun = funobj->getFunctionPrivate(); + JSFunction *fun = funobj->toFunction(); return fun->isNative() && fun->native() == call; } JSBool js_generic_native_method_dispatcher(JSContext *cx, uintN argc, Value *vp) { - JSFunctionSpec *fs = (JSFunctionSpec *) vp->toObject().getReservedSlot(0).toPrivate(); + JSFunctionSpec *fs = (JSFunctionSpec *) + vp->toObject().toFunction()->getExtendedSlot(0).toPrivate(); JS_ASSERT((fs->flags & JSFUN_GENERIC_NATIVE) != 0); if (argc < 1) { @@ -4539,7 +4529,8 @@ JS_DefineFunctions(JSContext *cx, JSObject *obj, JSFunctionSpec *fs) fun = js_DefineFunction(cx, ctor, ATOM_TO_JSID(atom), js_generic_native_method_dispatcher, fs->nargs + 1, - flags); + flags, + JSFunction::ExtendedFinalizeKind); if (!fun) return JS_FALSE; @@ -4547,9 +4538,7 @@ JS_DefineFunctions(JSContext *cx, JSObject *obj, JSFunctionSpec *fs) * As jsapi.h notes, fs must point to storage that lives as long * as fun->object lives. */ - Value priv = PrivateValue(fs); - if (!js_SetReservedSlot(cx, fun, 0, priv)) - return JS_FALSE; + fun->setExtendedSlot(0, PrivateValue(fs)); } fun = js_DefineFunction(cx, obj, ATOM_TO_JSID(atom), fs->call, fs->nargs, flags); @@ -5314,6 +5303,20 @@ JS_RestoreFrameChain(JSContext *cx) cx->stack.restoreFrameChain(); } +#ifdef MOZ_TRACE_JSCALLS +JS_PUBLIC_API(void) +JS_SetFunctionCallback(JSContext *cx, JSFunctionCallback fcb) +{ + cx->functionCallback = fcb; +} + +JS_PUBLIC_API(JSFunctionCallback) +JS_GetFunctionCallback(JSContext *cx) +{ + return cx->functionCallback; +} +#endif + /************************************************************************/ JS_PUBLIC_API(JSString *) JS_NewStringCopyN(JSContext *cx, const char *s, size_t n) diff --git a/js/src/jsapi.h b/js/src/jsapi.h index e7b77e083d2c..58e392bf496d 100644 --- a/js/src/jsapi.h +++ b/js/src/jsapi.h @@ -1118,6 +1118,14 @@ typedef JSBool typedef void (* JSErrorReporter)(JSContext *cx, const char *message, JSErrorReport *report); +#ifdef MOZ_TRACE_JSCALLS +typedef void +(* JSFunctionCallback)(const JSFunction *fun, + const JSScript *scr, + const JSContext *cx, + int entering); +#endif + /* * Possible exception types. These types are part of a JSErrorFormatString * structure. They define which error to throw in case of a runtime error. 
@@ -3146,8 +3154,7 @@ struct JSClass { #define JSCLASS_NEW_ENUMERATE (1<<1) /* has JSNewEnumerateOp hook */ #define JSCLASS_NEW_RESOLVE (1<<2) /* has JSNewResolveOp hook */ #define JSCLASS_PRIVATE_IS_NSISUPPORTS (1<<3) /* private is (nsISupports *) */ -#define JSCLASS_CONCURRENT_FINALIZER (1<<4) /* finalize is called on background thread */ -#define JSCLASS_NEW_RESOLVE_GETS_START (1<<5) /* JSNewResolveOp gets starting +#define JSCLASS_NEW_RESOLVE_GETS_START (1<<4) /* JSNewResolveOp gets starting object in prototype chain passed in via *objp in/out parameter */ @@ -4181,6 +4188,23 @@ JS_SaveFrameChain(JSContext *cx); extern JS_PUBLIC_API(void) JS_RestoreFrameChain(JSContext *cx); +#ifdef MOZ_TRACE_JSCALLS +/* + * The callback is expected to be quick and noninvasive. It should not + * trigger interrupts, turn on debugging, or produce uncaught JS + * exceptions. The state of the stack and registers in the context + * cannot be relied upon, since this callback may be invoked directly + * from either JIT. The 'entering' field means we are entering a + * function if it is positive, leaving a function if it is zero or + * negative. + */ +extern JS_PUBLIC_API(void) +JS_SetFunctionCallback(JSContext *cx, JSFunctionCallback fcb); + +extern JS_PUBLIC_API(JSFunctionCallback) +JS_GetFunctionCallback(JSContext *cx); +#endif /* MOZ_TRACE_JSCALLS */ + /************************************************************************/ /* diff --git a/js/src/jsarray.cpp b/js/src/jsarray.cpp index d066fa1cd570..c232199e3f39 100644 --- a/js/src/jsarray.cpp +++ b/js/src/jsarray.cpp @@ -281,10 +281,10 @@ JSObject::willBeSparseDenseArray(uintN requiredCapacity, uintN newElementsHint) JS_ASSERT(isDenseArray()); JS_ASSERT(requiredCapacity > MIN_SPARSE_INDEX); - uintN cap = numSlots(); + uintN cap = getDenseArrayCapacity(); JS_ASSERT(requiredCapacity >= cap); - if (requiredCapacity >= JSObject::NSLOTS_LIMIT) + if (requiredCapacity >= JSObject::NELEMENTS_LIMIT) return true; uintN minimalDenseCount = requiredCapacity / 4; @@ -352,7 +352,7 @@ JSObject::arrayGetOwnDataElement(JSContext *cx, size_t i, Value *vp) if (!shape || !shape->isDataDescriptor()) vp->setMagic(JS_ARRAY_HOLE); else - *vp = getSlot(shape->slot); + *vp = getSlot(shape->slot()); return true; } @@ -634,9 +634,7 @@ array_length_setter(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value if (oldinit > newlen) obj->setDenseArrayInitializedLength(newlen); if (oldcap > newlen) - obj->shrinkDenseArrayElements(cx, newlen); - if (oldinit > newlen && !cx->typeInferenceEnabled()) - obj->backfillDenseArrayHoles(cx); + obj->shrinkElements(cx, newlen); } else if (oldlen - newlen < (1 << 24)) { do { --oldlen; @@ -1219,7 +1217,7 @@ array_fix(JSContext *cx, JSObject *obj, bool *success, AutoIdVector *props) Class js::ArrayClass = { "Array", - Class::NON_NATIVE | JSCLASS_HAS_PRIVATE | JSCLASS_HAS_CACHED_PROTO(JSProto_Array), + Class::NON_NATIVE | JSCLASS_HAS_CACHED_PROTO(JSProto_Array), JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ JS_PropertyStub, /* getProperty */ @@ -1277,7 +1275,6 @@ Class js::ArrayClass = { Class js::SlowArrayClass = { "Array", - JSCLASS_HAS_PRIVATE | JSCLASS_HAS_CACHED_PROTO(JSProto_Array), slowarray_addProperty, JS_PropertyStub, /* delProperty */ @@ -1288,12 +1285,36 @@ Class js::SlowArrayClass = { JS_ConvertStub }; +bool +JSObject::allocateSlowArrayElements(JSContext *cx) +{ + JS_ASSERT(hasClass(&js::SlowArrayClass)); + JS_ASSERT(elements == emptyObjectElements); + + ObjectElements *header = cx->new_(0, 0); + if (!header) 
+ return false; + + elements = header->elements(); + return true; +} + static bool AddLengthProperty(JSContext *cx, JSObject *obj) { + /* + * Add the 'length' property for a newly created or converted slow array, + * and update the elements to be an empty array owned by the object. + * The shared emptyObjectElements singleton cannot be used for slow arrays, + * as accesses to 'length' will use the elements header. + */ + const jsid lengthId = ATOM_TO_JSID(cx->runtime->atomState.lengthAtom); JS_ASSERT(!obj->nativeLookup(cx, lengthId)); + if (!obj->allocateSlowArrayElements(cx)) + return false; + return obj->addProperty(cx, lengthId, array_length_getter, array_length_setter, SHAPE_INVALID_SLOT, JSPROP_PERMANENT | JSPROP_SHARED, 0, 0); } @@ -1309,62 +1330,50 @@ JSObject::makeDenseArraySlow(JSContext *cx) MarkTypeObjectFlags(cx, this, OBJECT_FLAG_NON_PACKED_ARRAY | OBJECT_FLAG_NON_DENSE_ARRAY); - markDenseArrayNotPacked(cx); + + uint32 arrayCapacity = getDenseArrayCapacity(); + uint32 arrayInitialized = getDenseArrayInitializedLength(); + + /* + * Get an allocated array of the existing elements, evicting from the fixed + * slots if necessary. + */ + if (!hasDynamicElements()) { + if (!growElements(cx, arrayCapacity)) + return false; + JS_ASSERT(hasDynamicElements()); + } /* * Save old map now, before calling InitScopeForObject. We'll have to undo * on error. This is gross, but a better way is not obvious. Note: the * exact contents of the array are not preserved on error. */ - js::Shape *oldMap = lastProp; + js::Shape *oldShape = lastProperty(); /* Create a native scope. */ gc::AllocKind kind = getAllocKind(); - js::EmptyShape *empty = InitScopeForObject(cx, this, &SlowArrayClass, - getProto()->getNewType(cx), kind); - if (!empty) + Shape *shape = EmptyShape::getInitialShape(cx, &SlowArrayClass, getProto(), + oldShape->getObjectParent(), kind); + if (!shape) return false; - setMap(empty); + this->shape_ = shape; - backfillDenseArrayHoles(cx); + /* Take ownership of the dense elements, reset to an empty dense array. */ + HeapValue *elems = elements; + elements = emptyObjectElements; - uint32 arrayCapacity = getDenseArrayCapacity(); - uint32 arrayInitialized = getDenseArrayInitializedLength(); - - /* - * Adjust the slots to account for the different layout between dense - * arrays and other objects. The slots must be dynamic, and the fixed slots - * are now available for newly added properties. - */ - if (denseArrayHasInlineSlots()) { - if (!allocSlots(cx, numSlots())) { - setMap(oldMap); - return false; - } - JS_ASSERT(!denseArrayHasInlineSlots()); - } - capacity = numFixedSlots() + arrayCapacity; - clasp = &SlowArrayClass; - - /* - * Root all values in the array during conversion, as SlowArrayClass only - * protects up to its slot span. - */ - AutoValueArray autoArray(cx, Valueify(slots), arrayInitialized); - - /* The initialized length is used iff this is a dense array. */ - initializedLength() = 0; - JS_ASSERT(newType == NULL); + /* Root all values in the array during conversion. */ + AutoValueArray autoArray(cx, (Value *) elems, arrayInitialized); /* * Begin with the length property to share more of the property tree. * The getter/setter here will directly access the object's private value. 
*/ if (!AddLengthProperty(cx, this)) { - setMap(oldMap); - capacity = arrayCapacity; - initializedLength() = arrayInitialized; - clasp = &ArrayClass; + this->shape_ = oldShape; + cx->free_(getElementsHeader()); + elements = elems; return false; } @@ -1373,37 +1382,30 @@ JSObject::makeDenseArraySlow(JSContext *cx) * remove holes, so that shapes use successive slots (as for other objects). */ uint32 next = 0; - for (uint32 i = 0; i < arrayCapacity; i++) { + for (uint32 i = 0; i < arrayInitialized; i++) { /* Dense array indexes can always fit in a jsid. */ jsid id; JS_ALWAYS_TRUE(ValueToId(cx, Int32Value(i), &id)); - if (slots[i].isMagic(JS_ARRAY_HOLE)) + if (elems[i].isMagic(JS_ARRAY_HOLE)) continue; - /* - * No barrier is needed here because the set of reachable objects before - * and after slowification is the same. During slowification, the - * autoArray rooter guarantees that all slots will be marked. - * - * It's important that we avoid a barrier here because the fixed slots - * of a dense array can be garbage; a write barrier after the switch to - * a slow array could cause a crash. - */ - initSlotUnchecked(next, slots[i]); - if (!addDataProperty(cx, id, next, JSPROP_ENUMERATE)) { - setMap(oldMap); - capacity = arrayCapacity; - initializedLength() = arrayInitialized; - clasp = &ArrayClass; + this->shape_ = oldShape; + cx->free_(getElementsHeader()); + elements = elems; return false; } + initSlot(next, elems[i]); + next++; } - clearSlotRange(next, capacity - next); + ObjectElements *oldheader = ObjectElements::fromElements(elems); + + getElementsHeader()->length = oldheader->length; + cx->free_(oldheader); return true; } @@ -1838,13 +1840,10 @@ InitArrayObject(JSContext *cx, JSObject *obj, jsuint length, const Value *vector return false; /* Avoid ensureDenseArrayElements to skip sparse array checks there. 
*/ - if (!obj->ensureSlots(cx, length)) + if (!obj->ensureElements(cx, length)) return false; - if (cx->typeInferenceEnabled()) - obj->setDenseArrayInitializedLength(length); - else - obj->backfillDenseArrayHoles(cx); + obj->setDenseArrayInitializedLength(length); bool hole = false; for (jsuint i = 0; i < length; i++) { @@ -2301,11 +2300,10 @@ NewbornArrayPushImpl(JSContext *cx, JSObject *obj, const Value &v) JS_ASSERT(obj->isDenseArray()); JS_ASSERT(length <= obj->getDenseArrayCapacity()); - if (length == obj->getDenseArrayCapacity() && !obj->ensureSlots(cx, length + 1)) + if (!obj->ensureElements(cx, length + 1)) return false; - if (cx->typeInferenceEnabled()) - obj->setDenseArrayInitializedLength(length + 1); + obj->setDenseArrayInitializedLength(length + 1); obj->setDenseArrayLength(length + 1); obj->initDenseArrayElementWithType(cx, length, v); return true; @@ -2376,7 +2374,7 @@ array_pop_dense(JSContext *cx, JSObject* obj, CallArgs &args) if (!hole && DeleteArrayElement(cx, obj, index, true) < 0) return JS_FALSE; - if (cx->typeInferenceEnabled() && obj->getDenseArrayInitializedLength() > index) + if (obj->getDenseArrayInitializedLength() > index) obj->setDenseArrayInitializedLength(index); obj->setArrayLength(cx, index); @@ -2438,10 +2436,7 @@ js::array_shift(JSContext *cx, uintN argc, Value *vp) if (args.rval().isMagic(JS_ARRAY_HOLE)) args.rval().setUndefined(); obj->moveDenseArrayElements(0, 1, length); - if (cx->typeInferenceEnabled()) - obj->setDenseArrayInitializedLength(obj->getDenseArrayInitializedLength() - 1); - else - obj->setDenseArrayElement(length, MagicValue(JS_ARRAY_HOLE)); + obj->setDenseArrayInitializedLength(obj->getDenseArrayInitializedLength() - 1); obj->setArrayLength(cx, length); if (!js_SuppressDeletedProperty(cx, obj, INT_TO_JSID(length))) return JS_FALSE; @@ -2543,7 +2538,7 @@ TryReuseArrayType(JSObject *obj, JSObject *nobj) * and has the same prototype. */ JS_ASSERT(nobj->isDenseArray()); - JS_ASSERT(nobj->type() == nobj->getProto()->newType); + JS_ASSERT(nobj->getProto()->hasNewType(nobj->type())); if (obj->isArray() && !obj->hasSingletonType() && obj->getProto() == nobj->getProto()) nobj->setType(obj->type()); @@ -2693,7 +2688,7 @@ array_splice(JSContext *cx, uintN argc, Value *vp) obj->setDenseArrayInitializedLength(finalLength); /* Steps 12(c)-(d). */ - obj->shrinkDenseArrayElements(cx, finalLength); + obj->shrinkElements(cx, finalLength); /* Fix running enumerators for the deleted items. */ if (!js_SuppressDeletedElements(cx, obj, finalLength, len)) @@ -2808,7 +2803,7 @@ mjit::stubs::ArrayConcatTwoArrays(VMFrame &f) /* No overflow here due to nslots limit. */ uint32 len = initlen1 + initlen2; - if (!result->ensureSlots(f.cx, len)) + if (!result->ensureElements(f.cx, len)) THROW(); JS_ASSERT(!result->getDenseArrayInitializedLength()); @@ -2846,8 +2841,6 @@ js::array_concat(JSContext *cx, uintN argc, Value *vp) return JS_FALSE; TryReuseArrayType(aobj, nobj); nobj->setArrayLength(cx, length); - if (!aobj->isPackedDenseArray()) - nobj->markDenseArrayNotPacked(cx); vp->setObject(*nobj); if (argc == 0) return JS_TRUE; @@ -2952,8 +2945,6 @@ array_slice(JSContext *cx, uintN argc, Value *vp) if (!nobj) return JS_FALSE; TryReuseArrayType(obj, nobj); - if (!obj->isPackedDenseArray()) - nobj->markDenseArrayNotPacked(cx); args.rval().setObject(*nobj); return JS_TRUE; } @@ -3609,8 +3600,14 @@ js_InitArrayClass(JSContext *cx, JSObject *obj) if (!ctor) return NULL; - /* The default 'new' object for Array.prototype has unknown properties. 
*/ - arrayProto->getNewType(cx, NULL, /* markUnknown = */ true); + /* + * The default 'new' type of Array.prototype is required by type inference + * to have unknown properties, to simplify handling of e.g. heterogenous + * arrays in JSON and script literals and allows setDenseArrayElement to + * be used without updating the indexed type set for such default arrays. + */ + if (!arrayProto->setNewTypeUnknown(cx)) + return NULL; if (!LinkConstructorAndPrototype(cx, ctor, arrayProto)) return NULL; @@ -3632,34 +3629,78 @@ js_InitArrayClass(JSContext *cx, JSObject *obj) */ namespace js { +static inline bool +EnsureNewArrayElements(JSContext *cx, JSObject *obj, jsuint length) +{ + /* + * If ensureElements creates dynamically allocated slots, then having + * fixedSlots is a waste. + */ + DebugOnly cap = obj->getDenseArrayCapacity(); + + if (!obj->ensureElements(cx, length)) + return false; + + JS_ASSERT_IF(cap, !obj->hasDynamicElements()); + + return true; +} + template static JS_ALWAYS_INLINE JSObject * NewArray(JSContext *cx, jsuint length, JSObject *proto) { - JS_ASSERT_IF(proto, proto->isArray()); + gc::AllocKind kind = GuessArrayGCKind(length); - gc::AllocKind kind = GuessObjectGCKind(length, true); - JSObject *obj = detail::NewObject(cx, &ArrayClass, proto, NULL, kind); +#ifdef JS_THREADSAFE + JS_ASSERT(CanBeFinalizedInBackground(kind, &ArrayClass)); + kind = GetBackgroundAllocKind(kind); +#endif + + GlobalObject *parent = GetCurrentGlobal(cx); + + NewObjectCache &cache = cx->compartment->newObjectCache; + + NewObjectCache::EntryIndex entry = -1; + if (cache.lookupGlobal(&ArrayClass, parent, kind, &entry)) { + JSObject *obj = cache.newObjectFromHit(cx, entry); + if (!obj) + return NULL; + /* Fixup the elements pointer and length, which may be incorrect. */ + obj->setFixedElements(); + obj->setArrayLength(cx, length); + if (allocateCapacity && !EnsureNewArrayElements(cx, obj, length)) + return NULL; + return obj; + } + + if (!proto && !FindProto(cx, &ArrayClass, parent, &proto)) + return NULL; + + types::TypeObject *type = proto->getNewType(cx); + if (!type) + return NULL; + + /* + * Get a shape with zero fixed slots, regardless of the size class. + * See JSObject::createDenseArray. + */ + Shape *shape = EmptyShape::getInitialShape(cx, &ArrayClass, proto, + proto->getParent(), gc::FINALIZE_OBJECT0); + if (!shape) + return NULL; + + JSObject* obj = JSObject::createDenseArray(cx, kind, shape, type, length); if (!obj) return NULL; - obj->setArrayLength(cx, length); + if (entry != -1) + cache.fillGlobal(entry, &ArrayClass, parent, kind, obj); - if (!cx->typeInferenceEnabled()) { - obj->markDenseArrayNotPacked(cx); - obj->backfillDenseArrayHoles(cx); - } - - if (allocateCapacity) { - /* If ensureSlots creates dynamically allocated slots, then having fixedSlots is a waste. */ - DebugOnly oldSlots = obj->numSlots(); - - if (!obj->ensureSlots(cx, length)) - return NULL; - - JS_ASSERT_IF(obj->numFixedSlots(), oldSlots == obj->numSlots()); - } + if (allocateCapacity && !EnsureNewArrayElements(cx, obj, length)) + return NULL; + Probes::createObject(cx, obj); return obj; } @@ -3710,8 +3751,7 @@ NewDenseCopiedArray(JSContext *cx, uint32 length, const Value *vp, JSObject *pro JS_ASSERT(obj->getDenseArrayCapacity() >= length); - if (cx->typeInferenceEnabled()) - obj->setDenseArrayInitializedLength(vp ? length : 0); + obj->setDenseArrayInitializedLength(vp ? 
length : 0); if (vp) obj->initDenseArrayElements(0, vp, length); @@ -3722,7 +3762,7 @@ NewDenseCopiedArray(JSContext *cx, uint32 length, const Value *vp, JSObject *pro JSObject * NewSlowEmptyArray(JSContext *cx) { - JSObject *obj = NewNonFunction(cx, &SlowArrayClass, NULL, NULL); + JSObject *obj = NewBuiltinClassInstance(cx, &SlowArrayClass); if (!obj || !AddLengthProperty(cx, obj)) return NULL; diff --git a/js/src/jsarray.h b/js/src/jsarray.h index c226fcbd27cb..0213a0e43606 100644 --- a/js/src/jsarray.h +++ b/js/src/jsarray.h @@ -51,20 +51,6 @@ /* Small arrays are dense, no matter what. */ const uintN MIN_SPARSE_INDEX = 256; -inline uint32 -JSObject::getDenseArrayInitializedLength() -{ - JS_ASSERT(isDenseArray()); - return initializedLength(); -} - -inline bool -JSObject::isPackedDenseArray() -{ - JS_ASSERT(isDenseArray()); - return flags & PACKED_ARRAY; -} - namespace js { /* 2^32-2, inclusive */ const uint32 MAX_ARRAY_INDEX = 4294967294u; diff --git a/js/src/jsarrayinlines.h b/js/src/jsarrayinlines.h index c347f1751802..659bfd499991 100644 --- a/js/src/jsarrayinlines.h +++ b/js/src/jsarrayinlines.h @@ -43,31 +43,11 @@ #include "jsinferinlines.h" #include "jsobjinlines.h" -inline void -JSObject::setDenseArrayInitializedLength(uint32 length) -{ - JS_ASSERT(isDenseArray()); - JS_ASSERT(length <= getDenseArrayCapacity()); - uint32 cur = initializedLength(); - prepareSlotRangeForOverwrite(length, cur); - initializedLength() = length; -} - inline void JSObject::markDenseArrayNotPacked(JSContext *cx) { JS_ASSERT(isDenseArray()); - if (flags & PACKED_ARRAY) { - flags ^= PACKED_ARRAY; - MarkTypeObjectFlags(cx, this, js::types::OBJECT_FLAG_NON_PACKED_ARRAY); - } -} - -inline void -JSObject::backfillDenseArrayHoles(JSContext *cx) -{ - /* Ensure an array's elements are fully initialized. */ - ensureDenseArrayInitializedLength(cx, getDenseArrayCapacity(), 0); + MarkTypeObjectFlags(cx, this, js::types::OBJECT_FLAG_NON_PACKED_ARRAY); } inline void @@ -78,13 +58,14 @@ JSObject::ensureDenseArrayInitializedLength(JSContext *cx, uint32 index, uint32 * mark the elements through 'index + extra' as initialized in preparation * for a write. */ - JS_ASSERT(index + extra <= capacity); - if (initializedLength() < index) + JS_ASSERT(index + extra <= getDenseArrayCapacity()); + uint32 &initlen = getElementsHeader()->initializedLength; + if (initlen < index) markDenseArrayNotPacked(cx); - if (initializedLength() < index + extra) { - js::InitValueRange(slots + initializedLength(), index + extra - initializedLength(), true); - initializedLength() = index + extra; + if (initlen < index + extra) { + js::InitValueRange(elements + initlen, index + extra - initlen, true); + initlen = index + extra; } } @@ -93,13 +74,7 @@ JSObject::ensureDenseArrayElements(JSContext *cx, uintN index, uintN extra) { JS_ASSERT(isDenseArray()); - uintN currentCapacity = numSlots(); - - /* - * Don't take excessive slow paths when inference is disabled, due to - * uninitialized slots between initializedLength and capacity. 
- */ - JS_ASSERT_IF(!cx->typeInferenceEnabled(), currentCapacity == getDenseArrayInitializedLength()); + uintN currentCapacity = getDenseArrayCapacity(); uintN requiredCapacity; if (extra == 1) { @@ -133,7 +108,7 @@ JSObject::ensureDenseArrayElements(JSContext *cx, uintN index, uintN extra) willBeSparseDenseArray(requiredCapacity, extra)) { return ED_SPARSE; } - if (!growSlots(cx, requiredCapacity)) + if (!growElements(cx, requiredCapacity)) return ED_FAILED; ensureDenseArrayInitializedLength(cx, index, extra); diff --git a/js/src/jscell.h b/js/src/jscell.h index 2ec7ae8afdbf..29443c7b58ba 100644 --- a/js/src/jscell.h +++ b/js/src/jscell.h @@ -65,10 +65,9 @@ enum AllocKind { FINALIZE_OBJECT16, FINALIZE_OBJECT16_BACKGROUND, FINALIZE_OBJECT_LAST = FINALIZE_OBJECT16_BACKGROUND, - FINALIZE_FUNCTION, - FINALIZE_FUNCTION_AND_OBJECT_LAST = FINALIZE_FUNCTION, FINALIZE_SCRIPT, FINALIZE_SHAPE, + FINALIZE_BASE_SHAPE, FINALIZE_TYPE_OBJECT, #if JS_HAS_XML_SUPPORT FINALIZE_XML, @@ -81,7 +80,6 @@ enum AllocKind { static const unsigned FINALIZE_LIMIT = FINALIZE_LAST + 1; static const unsigned FINALIZE_OBJECT_LIMIT = FINALIZE_OBJECT_LAST + 1; -static const unsigned FINALIZE_FUNCTION_AND_OBJECT_LIMIT = FINALIZE_FUNCTION_AND_OBJECT_LAST + 1; /* * Live objects are marked black. How many other additional colors are available diff --git a/js/src/jsclass.h b/js/src/jsclass.h index 674a4d8591a6..ff9e18d28344 100644 --- a/js/src/jsclass.h +++ b/js/src/jsclass.h @@ -360,6 +360,10 @@ struct Class bool isNative() const { return !(flags & NON_NATIVE); } + + bool hasPrivate() const { + return !!(flags & JSCLASS_HAS_PRIVATE); + } }; JS_STATIC_ASSERT(offsetof(JSClass, name) == offsetof(Class, name)); diff --git a/js/src/jscntxt.cpp b/js/src/jscntxt.cpp index 428510f2048f..d831ebdf371a 100644 --- a/js/src/jscntxt.cpp +++ b/js/src/jscntxt.cpp @@ -1189,7 +1189,7 @@ js_ReportMissingArg(JSContext *cx, const Value &v, uintN arg) JS_snprintf(argbuf, sizeof argbuf, "%u", arg); bytes = NULL; if (IsFunctionObject(v)) { - atom = v.toObject().getFunctionPrivate()->atom; + atom = v.toObject().toFunction()->atom; bytes = DecompileValueGenerator(cx, JSDVG_SEARCH_STACK, v, atom); if (!bytes) diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 1cfee5286f04..e872b85eb090 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -394,20 +394,6 @@ struct JSRuntime JSActivityCallback activityCallback; void *activityCallbackArg; - /* - * Shape regenerated whenever a prototype implicated by an "add property" - * property cache fill and induced trace guard has a readonly property or a - * setter defined on it. This number proxies for the shapes of all objects - * along the prototype chain of all objects in the runtime on which such an - * add-property result has been cached/traced. - * - * See bug 492355 for more details. - * - * This comes early in JSRuntime to minimize the immediate format used by - * trace-JITted code that reads it. - */ - uint32 protoHazardShape; - /* Garbage collector state, used by jsgc.c. */ /* @@ -485,7 +471,6 @@ struct JSRuntime bool gcPoke; bool gcMarkAndSweep; bool gcRunning; - bool gcRegenShapes; /* * These options control the zealousness of the GC. The fundamental values @@ -664,21 +649,6 @@ struct JSRuntime void setTrustedPrincipals(JSPrincipals *p) { trustedPrincipals_ = p; } JSPrincipals *trustedPrincipals() const { return trustedPrincipals_; } - /* - * Object shape (property cache structural type) identifier generator. 
- * - * Type 0 stands for the empty scope, and must not be regenerated due to - * uint32 wrap-around. Since js_GenerateShape (in jsinterp.cpp) uses - * atomic pre-increment, the initial value for the first typed non-empty - * scope will be 1. - * - * If this counter overflows into SHAPE_OVERFLOW_BIT (in jsinterp.h), the - * cache is disabled, to avoid aliasing two different types. It stays - * disabled until a triggered GC at some later moment compresses live - * types, minimizing rt->shapeGen in the process. - */ - volatile uint32 shapeGen; - /* Literal table maintained by jsatom.c functions. */ JSAtomState atomState; @@ -2135,29 +2105,6 @@ enum FrameExpandKind { FRAME_EXPAND_ALL = 1 }; -static JS_INLINE JSBool -js_IsPropertyCacheDisabled(JSContext *cx) -{ - return cx->runtime->shapeGen >= js::SHAPE_OVERFLOW_BIT; -} - -static JS_INLINE uint32 -js_RegenerateShapeForGC(JSRuntime *rt) -{ - JS_ASSERT(rt->gcRunning); - JS_ASSERT(rt->gcRegenShapes); - - /* - * Under the GC, compared with js_GenerateShape, we don't need to use - * atomic increments but we still must make sure that after an overflow - * the shape stays such. - */ - uint32 shape = rt->shapeGen; - shape = (shape + 1) | (shape & js::SHAPE_OVERFLOW_BIT); - rt->shapeGen = shape; - return shape; -} - namespace js { /************************************************************************/ diff --git a/js/src/jscntxtinlines.h b/js/src/jscntxtinlines.h index 2e5d6fa539af..c929c44d6b64 100644 --- a/js/src/jscntxtinlines.h +++ b/js/src/jscntxtinlines.h @@ -334,7 +334,7 @@ CallJSNativeConstructor(JSContext *cx, Native native, const CallArgs &args) JS_ASSERT_IF(native != FunctionProxyClass.construct && native != CallableObjectClass.construct && native != js::CallOrConstructBoundFunction && - (!callee.isFunction() || callee.getFunctionPrivate()->u.n.clasp != &ObjectClass), + (!callee.isFunction() || callee.toFunction()->u.n.clasp != &ObjectClass), !args.rval().isPrimitive() && callee != args.rval().toObject()); return true; @@ -472,12 +472,6 @@ JSContext::ensureGeneratorStackSpace() return ok; } -inline js::RegExpStatics * -JSContext::regExpStatics() -{ - return js::GetGlobalForScopeChain(this)->getRegExpStatics(); -} - inline void JSContext::setPendingException(js::Value v) { this->throwing = true; diff --git a/js/src/jscompartment.cpp b/js/src/jscompartment.cpp index fb3e3c1321bf..c36cfc0d6162 100644 --- a/js/src/jscompartment.cpp +++ b/js/src/jscompartment.cpp @@ -84,14 +84,7 @@ JSCompartment::JSCompartment(JSRuntime *rt) jaegerCompartment_(NULL), #endif propertyTree(thisForCtor()), - emptyArgumentsShape(NULL), - emptyBlockShape(NULL), - emptyCallShape(NULL), - emptyDeclEnvShape(NULL), - emptyEnumeratorShape(NULL), - emptyWithShape(NULL), - initialRegExpShape(NULL), - initialStringShape(NULL), + emptyTypeObject(NULL), debugModeBits(rt->debugMode ? 
DebugFromC : 0), mathCache(NULL), breakpointSites(rt), @@ -121,6 +114,8 @@ JSCompartment::init(JSContext *cx) activeAnalysis = activeInference = false; types.init(cx); + newObjectCache.reset(); + if (!crossCompartmentWrappers.init()) return false; @@ -260,7 +255,8 @@ JSCompartment::wrap(JSContext *cx, Value *vp) JS_ASSERT(obj->isCrossCompartmentWrapper()); if (global->getClass() != &dummy_class && obj->getParent() != global) { do { - obj->setParent(global); + if (!obj->setParent(cx, global)) + return false; obj = obj->getProto(); } while (obj && obj->isCrossCompartmentWrapper()); } @@ -314,7 +310,8 @@ JSCompartment::wrap(JSContext *cx, Value *vp) if (!crossCompartmentWrappers.put(GetProxyPrivate(wrapper), *vp)) return false; - wrapper->setParent(global); + if (!wrapper->setParent(cx, global)) + return false; return true; } @@ -432,11 +429,11 @@ JSCompartment::markTypes(JSTracer *trc) } for (size_t thingKind = FINALIZE_OBJECT0; - thingKind < FINALIZE_FUNCTION_AND_OBJECT_LIMIT; + thingKind < FINALIZE_OBJECT_LIMIT; thingKind++) { for (CellIterUnderGC i(this, AllocKind(thingKind)); !i.done(); i.next()) { JSObject *object = i.get(); - if (!object->isNewborn() && object->hasSingletonType()) + if (object->hasSingletonType()) MarkRoot(trc, object, "mark_types_singleton"); } } @@ -459,24 +456,17 @@ JSCompartment::sweep(JSContext *cx, bool releaseTypes) } } - /* Remove dead empty shapes. */ - if (emptyArgumentsShape && IsAboutToBeFinalized(cx, emptyArgumentsShape)) - emptyArgumentsShape = NULL; - if (emptyBlockShape && IsAboutToBeFinalized(cx, emptyBlockShape)) - emptyBlockShape = NULL; - if (emptyCallShape && IsAboutToBeFinalized(cx, emptyCallShape)) - emptyCallShape = NULL; - if (emptyDeclEnvShape && IsAboutToBeFinalized(cx, emptyDeclEnvShape)) - emptyDeclEnvShape = NULL; - if (emptyEnumeratorShape && IsAboutToBeFinalized(cx, emptyEnumeratorShape)) - emptyEnumeratorShape = NULL; - if (emptyWithShape && IsAboutToBeFinalized(cx, emptyWithShape)) - emptyWithShape = NULL; + /* Remove dead references held weakly by the compartment. */ - if (initialRegExpShape && IsAboutToBeFinalized(cx, initialRegExpShape)) - initialRegExpShape = NULL; - if (initialStringShape && IsAboutToBeFinalized(cx, initialStringShape)) - initialStringShape = NULL; + sweepBaseShapeTable(cx); + sweepInitialShapeTable(cx); + sweepNewTypeObjectTable(cx, newTypeObjects); + sweepNewTypeObjectTable(cx, lazyTypeObjects); + + if (emptyTypeObject && IsAboutToBeFinalized(cx, emptyTypeObject)) + emptyTypeObject = NULL; + + newObjectCache.reset(); sweepBreakpoints(cx); diff --git a/js/src/jscompartment.h b/js/src/jscompartment.h index 1e5c920d45db..933742c97987 100644 --- a/js/src/jscompartment.h +++ b/js/src/jscompartment.h @@ -46,6 +46,7 @@ #include "jsgc.h" #include "jsgcstats.h" #include "jsobj.h" +#include "jsscope.h" #include "vm/GlobalObject.h" #ifdef _MSC_VER @@ -248,37 +249,28 @@ struct JS_FRIEND_API(JSCompartment) { jsrefcount liveDictModeNodes; #endif - typedef js::ReadBarriered BarrieredEmptyShape; - typedef js::ReadBarriered BarrieredShape; + /* Set of all unowned base shapes in the compartment. */ + js::BaseShapeSet baseShapes; + void sweepBaseShapeTable(JSContext *cx); - /* - * Runtime-shared empty scopes for well-known built-in objects that lack - * class prototypes (the usual locus of an emptyShape). 
Mnemonic: ABCDEW - */ - BarrieredEmptyShape emptyArgumentsShape; - BarrieredEmptyShape emptyBlockShape; - BarrieredEmptyShape emptyCallShape; - BarrieredEmptyShape emptyDeclEnvShape; - BarrieredEmptyShape emptyEnumeratorShape; - BarrieredEmptyShape emptyWithShape; + /* Set of initial shapes in the compartment. */ + js::InitialShapeSet initialShapes; + void sweepInitialShapeTable(JSContext *cx); - typedef js::HashSet, - js::SystemAllocPolicy> EmptyShapeSet; + /* Set of default 'new' or lazy types in the compartment. */ + js::types::TypeObjectSet newTypeObjects; + js::types::TypeObjectSet lazyTypeObjects; + void sweepNewTypeObjectTable(JSContext *cx, js::types::TypeObjectSet &table); - EmptyShapeSet emptyShapes; + js::types::TypeObject *emptyTypeObject; - /* - * Initial shapes given to RegExp and String objects, encoding the initial - * sets of built-in instance properties and the fixed slots where they must - * be stored (see JSObject::JSSLOT_(REGEXP|STRING)_*). Later property - * additions may cause these shapes to not be used by a RegExp or String - * (even along the entire shape parent chain, should the object go into - * dictionary mode). But because all the initial properties are - * non-configurable, they will always map to fixed slots. - */ - BarrieredShape initialRegExpShape; - BarrieredShape initialStringShape; + /* Get the default 'new' type for objects with a NULL prototype. */ + inline js::types::TypeObject *getEmptyType(JSContext *cx); + + js::types::TypeObject *getLazyType(JSContext *cx, JSObject *proto); + + /* Cache to speed up object creation. */ + js::NewObjectCache newObjectCache; private: enum { DebugFromC = 1, DebugFromJS = 2 }; diff --git a/js/src/jsdate.cpp b/js/src/jsdate.cpp index 72ee7474f88b..ac30a69f3416 100644 --- a/js/src/jsdate.cpp +++ b/js/src/jsdate.cpp @@ -1228,9 +1228,11 @@ SetUTCTime(JSContext *cx, JSObject *obj, jsdouble t, Value *vp = NULL) { JS_ASSERT(obj->isDate()); - size_t slotCap = JS_MIN(obj->numSlots(), JSObject::DATE_CLASS_RESERVED_SLOTS); - for (size_t ind = JSObject::JSSLOT_DATE_COMPONENTS_START; ind < slotCap; ind++) + for (size_t ind = JSObject::JSSLOT_DATE_COMPONENTS_START; + ind < JSObject::DATE_CLASS_RESERVED_SLOTS; + ind++) { obj->setSlot(ind, UndefinedValue()); + } obj->setDateUTCTime(DoubleValue(t)); if (vp) @@ -1257,12 +1259,6 @@ FillLocalTimes(JSContext *cx, JSObject *obj) jsdouble utcTime = obj->getDateUTCTime().toNumber(); - /* Make sure there are slots to store the cached information. 
*/ - if (obj->numSlots() < JSObject::DATE_CLASS_RESERVED_SLOTS) { - if (!obj->growSlots(cx, JSObject::DATE_CLASS_RESERVED_SLOTS)) - return false; - } - if (!JSDOUBLE_IS_FINITE(utcTime)) { for (size_t ind = JSObject::JSSLOT_DATE_COMPONENTS_START; ind < JSObject::DATE_CLASS_RESERVED_SLOTS; @@ -2696,8 +2692,6 @@ js_InitDateClass(JSContext *cx, JSObject *obj) { return NULL; } - if (!cx->typeInferenceEnabled()) - dateProto->brand(cx); if (!DefineConstructorAndPrototype(cx, global, JSProto_Date, ctor, dateProto)) return NULL; @@ -2709,7 +2703,7 @@ JS_FRIEND_API(JSObject *) js_NewDateObjectMsec(JSContext *cx, jsdouble msec_time) { JSObject *obj = NewBuiltinClassInstance(cx, &DateClass); - if (!obj || !obj->ensureSlots(cx, JSObject::DATE_CLASS_RESERVED_SLOTS)) + if (!obj) return NULL; if (!SetUTCTime(cx, obj, msec_time)) return NULL; diff --git a/js/src/jsdbgapi.cpp b/js/src/jsdbgapi.cpp index 4f5515049436..015a18eb7f2c 100644 --- a/js/src/jsdbgapi.cpp +++ b/js/src/jsdbgapi.cpp @@ -616,10 +616,15 @@ JS_GetFrameFunctionObject(JSContext *cx, JSStackFrame *fpArg) return NULL; JS_ASSERT(fp->callee().isFunction()); - JS_ASSERT(fp->callee().getPrivate() == fp->fun()); return &fp->callee(); } +JS_PUBLIC_API(JSObject *) +JS_GetParentOrScopeChain(JSContext *cx, JSObject *obj) +{ + return obj->scopeChain(); +} + JS_PUBLIC_API(JSBool) JS_IsConstructorFrame(JSContext *cx, JSStackFrame *fp) { @@ -786,7 +791,7 @@ JS_PropertyIterator(JSObject *obj, JSScopeProperty **iteratorp) shape = shape->previous(); if (!shape->previous()) { - JS_ASSERT(JSID_IS_EMPTY(shape->propid)); + JS_ASSERT(shape->isEmptyShape()); shape = NULL; } @@ -799,7 +804,7 @@ JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop, { assertSameCompartment(cx, obj); Shape *shape = (Shape *) sprop; - pd->id = IdToJsval(shape->propid); + pd->id = IdToJsval(shape->propid()); JSBool wasThrowing = cx->isExceptionPending(); Value lastException = UndefinedValue(); @@ -807,7 +812,7 @@ JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop, lastException = cx->getPendingException(); cx->clearPendingException(); - if (!js_GetProperty(cx, obj, shape->propid, &pd->value)) { + if (!js_GetProperty(cx, obj, shape->propid(), &pd->value)) { if (!cx->isExceptionPending()) { pd->flags = JSPD_ERROR; pd->value = JSVAL_VOID; @@ -827,21 +832,21 @@ JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop, | (!shape->configurable() ? 
JSPD_PERMANENT : 0); pd->spare = 0; if (shape->getter() == GetCallArg) { - pd->slot = shape->shortid; + pd->slot = shape->shortid(); pd->flags |= JSPD_ARGUMENT; } else if (shape->getter() == GetCallVar) { - pd->slot = shape->shortid; + pd->slot = shape->shortid(); pd->flags |= JSPD_VARIABLE; } else { pd->slot = 0; } pd->alias = JSVAL_VOID; - if (obj->containsSlot(shape->slot)) { + if (obj->containsSlot(shape->slot())) { for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) { const Shape &aprop = r.front(); - if (&aprop != shape && aprop.slot == shape->slot) { - pd->alias = IdToJsval(aprop.propid); + if (&aprop != shape && aprop.slot() == shape->slot()) { + pd->alias = IdToJsval(aprop.propid()); break; } } @@ -1063,8 +1068,7 @@ JS_IsSystemObject(JSContext *cx, JSObject *obj) JS_PUBLIC_API(JSBool) JS_MakeSystemObject(JSContext *cx, JSObject *obj) { - obj->setSystem(); - return true; + return obj->setSystem(cx); } /************************************************************************/ @@ -1607,22 +1611,6 @@ js_ResumeVtune() #endif /* MOZ_VTUNE */ -#ifdef MOZ_TRACE_JSCALLS - -JS_PUBLIC_API(void) -JS_SetFunctionCallback(JSContext *cx, JSFunctionCallback fcb) -{ - cx->functionCallback = fcb; -} - -JS_PUBLIC_API(JSFunctionCallback) -JS_GetFunctionCallback(JSContext *cx) -{ - return cx->functionCallback; -} - -#endif /* MOZ_TRACE_JSCALLS */ - JS_PUBLIC_API(void) JS_DumpBytecode(JSContext *cx, JSScript *script) { diff --git a/js/src/jsdbgapi.h b/js/src/jsdbgapi.h index f2b6951d2125..ecce3e891c9c 100644 --- a/js/src/jsdbgapi.h +++ b/js/src/jsdbgapi.h @@ -279,6 +279,9 @@ JS_GetFrameFunction(JSContext *cx, JSStackFrame *fp); extern JS_PUBLIC_API(JSObject *) JS_GetFrameFunctionObject(JSContext *cx, JSStackFrame *fp); +extern JS_PUBLIC_API(JSObject *) +JS_GetParentOrScopeChain(JSContext *cx, JSObject *obj); + /* XXXrginda Initially published with typo */ #define JS_IsContructorFrame JS_IsConstructorFrame extern JS_PUBLIC_API(JSBool) @@ -458,9 +461,7 @@ JS_GetScriptTotalSize(JSContext *cx, JSScript *script); * Return true if obj is a "system" object, that is, one created by * JS_NewSystemObject with the system flag set and not JS_NewObject. * - * What "system" means is up to the API client, but it can be used to implement - * access control policies based on script filenames and their prefixes, using - * JS_FlagScriptFilenamePrefix and JS_GetTopScriptFilenameFlags. + * What "system" means is up to the API client. */ extern JS_PUBLIC_API(JSBool) JS_IsSystemObject(JSContext *cx, JSObject *obj); @@ -577,28 +578,6 @@ js_ResumeVtune(); #endif /* MOZ_VTUNE */ -#ifdef MOZ_TRACE_JSCALLS -typedef void (*JSFunctionCallback)(const JSFunction *fun, - const JSScript *scr, - const JSContext *cx, - int entering); - -/* - * The callback is expected to be quick and noninvasive. It should not - * trigger interrupts, turn on debugging, or produce uncaught JS - * exceptions. The state of the stack and registers in the context - * cannot be relied upon, since this callback may be invoked directly - * from either JIT. The 'entering' field means we are entering a - * function if it is positive, leaving a function if it is zero or - * negative. 
- */ -extern JS_PUBLIC_API(void) -JS_SetFunctionCallback(JSContext *cx, JSFunctionCallback fcb); - -extern JS_PUBLIC_API(JSFunctionCallback) -JS_GetFunctionCallback(JSContext *cx); -#endif /* MOZ_TRACE_JSCALLS */ - extern JS_PUBLIC_API(void) JS_DumpBytecode(JSContext *cx, JSScript *script); diff --git a/js/src/jsexn.cpp b/js/src/jsexn.cpp index ff0cd4f4ce70..0bce1ae50f66 100644 --- a/js/src/jsexn.cpp +++ b/js/src/jsexn.cpp @@ -550,7 +550,7 @@ ValueToShortSource(JSContext *cx, const Value &v) /* * XXX Avoid function decompilation bloat for now. */ - str = JS_GetFunctionId(obj->getFunctionPrivate()); + str = JS_GetFunctionId(obj->toFunction()); if (!str && !(str = js_ValueToSource(cx, v))) { /* * Continue to soldier on if the function couldn't be @@ -700,10 +700,6 @@ FilenameToString(JSContext *cx, const char *filename) return JS_NewStringCopyZ(cx, filename); } -enum { - JSSLOT_ERROR_EXNTYPE = 0 -}; - static JSBool Exception(JSContext *cx, uintN argc, Value *vp) { @@ -726,7 +722,7 @@ Exception(JSContext *cx, uintN argc, Value *vp) } JSObject *errProto = &protov.toObject(); - JSObject *obj = NewNativeClassInstance(cx, &ErrorClass, errProto, errProto->getParent()); + JSObject *obj = NewObjectWithGivenProto(cx, &ErrorClass, errProto, NULL); if (!obj) return false; @@ -772,7 +768,7 @@ Exception(JSContext *cx, uintN argc, Value *vp) lineno = iter.done() ? 0 : js_FramePCToLineNumber(cx, iter.fp(), iter.pc()); } - intN exnType = args.callee().getReservedSlot(JSSLOT_ERROR_EXNTYPE).toInt32(); + intN exnType = args.callee().toFunction()->getExtendedSlot(0).toInt32(); if (!InitExnPrivate(cx, obj, message, filename, lineno, NULL, exnType)) return false; @@ -1035,10 +1031,11 @@ InitErrorClass(JSContext *cx, GlobalObject *global, intN type, JSObject &proto) } /* Create the corresponding constructor. 
*/ - JSFunction *ctor = global->createConstructor(cx, Exception, &ErrorClass, name, 1); + JSFunction *ctor = global->createConstructor(cx, Exception, &ErrorClass, name, 1, + JSFunction::ExtendedFinalizeKind); if (!ctor) return NULL; - ctor->setReservedSlot(JSSLOT_ERROR_EXNTYPE, Int32Value(int32(type))); + ctor->setExtendedSlot(0, Int32Value(int32(type))); if (!LinkConstructorAndPrototype(cx, ctor, errorProto)) return NULL; @@ -1174,7 +1171,7 @@ js_ErrorToException(JSContext *cx, const char *message, JSErrorReport *reportp, goto out; tv[0] = OBJECT_TO_JSVAL(errProto); - errObject = NewNativeClassInstance(cx, &ErrorClass, errProto, errProto->getParent()); + errObject = NewObjectWithGivenProto(cx, &ErrorClass, errProto, NULL); if (!errObject) { ok = JS_FALSE; goto out; @@ -1362,7 +1359,7 @@ js_CopyErrorObject(JSContext *cx, JSObject *errobj, JSObject *scope) JSObject *proto; if (!js_GetClassPrototype(cx, scope->getGlobal(), GetExceptionProtoKey(copy->exnType), &proto)) return NULL; - JSObject *copyobj = NewNativeClassInstance(cx, &ErrorClass, proto, proto->getParent()); + JSObject *copyobj = NewObjectWithGivenProto(cx, &ErrorClass, proto, NULL); copyobj->setPrivate(copy); autoFree.p = NULL; return copyobj; diff --git a/js/src/jsfriendapi.cpp b/js/src/jsfriendapi.cpp index acc8adaa0ff0..8b78b6eb789a 100644 --- a/js/src/jsfriendapi.cpp +++ b/js/src/jsfriendapi.cpp @@ -85,7 +85,7 @@ JS_FRIEND_API(JSFunction *) JS_GetObjectFunction(JSObject *obj) { if (obj->isFunction()) - return obj->getFunctionPrivate(); + return obj->toFunction(); return NULL; } @@ -125,14 +125,6 @@ JS_NewObjectWithUniqueType(JSContext *cx, JSClass *clasp, JSObject *proto, JSObj return obj; } -JS_FRIEND_API(uint32) -JS_ObjectCountDynamicSlots(JSObject *obj) -{ - if (obj->hasSlotsArray()) - return obj->numDynamicSlots(obj->numSlots()); - return 0; -} - JS_PUBLIC_API(void) JS_ShrinkingGC(JSContext *cx) { @@ -186,20 +178,126 @@ AutoSwitchCompartment::~AutoSwitchCompartment() cx->compartment = oldCompartment; } -#ifdef DEBUG -JS_FRIEND_API(void) -js::CheckReservedSlot(const JSObject *obj, size_t slot) +JS_FRIEND_API(size_t) +js::GetObjectDynamicSlotSize(JSObject *obj, JSMallocSizeOfFun mallocSizeOf) { - CheckSlot(obj, slot); - JS_ASSERT(slot < JSSLOT_FREE(obj->getClass())); + return obj->dynamicSlotSize(mallocSizeOf); +} + +JS_FRIEND_API(size_t) +js::GetCompartmentShapeTableSize(JSCompartment *c, JSMallocSizeOfFun mallocSizeOf) +{ + return c->baseShapes.sizeOfExcludingThis(mallocSizeOf) + + c->initialShapes.sizeOfExcludingThis(mallocSizeOf) + + c->newTypeObjects.sizeOfExcludingThis(mallocSizeOf) + + c->lazyTypeObjects.sizeOfExcludingThis(mallocSizeOf); +} + +JS_FRIEND_API(bool) +js::IsScopeObject(const JSObject *obj) +{ + return obj->isInternalScope(); +} + +JS_FRIEND_API(JSObject *) +js::GetObjectParentMaybeScope(const JSObject *obj) +{ + return obj->scopeChain(); +} + +JS_FRIEND_API(JSObject *) +js::GetGlobalForObjectCrossCompartment(JSObject *obj) +{ + return obj->getGlobal(); +} + +JS_FRIEND_API(uint32) +js::GetObjectSlotSpan(const JSObject *obj) +{ + return obj->slotSpan(); +} + +JS_FRIEND_API(bool) +js::IsOriginalScriptFunction(JSFunction *fun) +{ + return fun->script()->function() == fun; +} + +JS_FRIEND_API(JSFunction *) +js::DefineFunctionWithReserved(JSContext *cx, JSObject *obj, const char *name, JSNative call, + uintN nargs, uintN attrs) +{ + JS_THREADSAFE_ASSERT(cx->compartment != cx->runtime->atomsCompartment); + CHECK_REQUEST(cx); + assertSameCompartment(cx, obj); + JSAtom *atom = js_Atomize(cx, name, strlen(name)); 
+ if (!atom) + return NULL; + return js_DefineFunction(cx, obj, ATOM_TO_JSID(atom), call, nargs, attrs, + JSFunction::ExtendedFinalizeKind); +} + +JS_FRIEND_API(JSFunction *) +js::NewFunctionWithReserved(JSContext *cx, JSNative native, uintN nargs, uintN flags, + JSObject *parent, const char *name) +{ + JS_THREADSAFE_ASSERT(cx->compartment != cx->runtime->atomsCompartment); + JSAtom *atom; + + CHECK_REQUEST(cx); + assertSameCompartment(cx, parent); + + if (!name) { + atom = NULL; + } else { + atom = js_Atomize(cx, name, strlen(name)); + if (!atom) + return NULL; + } + + return js_NewFunction(cx, NULL, native, nargs, flags, parent, atom, + JSFunction::ExtendedFinalizeKind); +} + +JS_FRIEND_API(JSFunction *) +js::NewFunctionByIdWithReserved(JSContext *cx, JSNative native, uintN nargs, uintN flags, JSObject *parent, + jsid id) +{ + JS_ASSERT(JSID_IS_STRING(id)); + JS_THREADSAFE_ASSERT(cx->compartment != cx->runtime->atomsCompartment); + CHECK_REQUEST(cx); + assertSameCompartment(cx, parent); + + return js_NewFunction(cx, NULL, native, nargs, flags, parent, JSID_TO_ATOM(id), + JSFunction::ExtendedFinalizeKind); +} + +JS_FRIEND_API(JSObject *) +js::InitClassWithReserved(JSContext *cx, JSObject *obj, JSObject *parent_proto, + JSClass *clasp, JSNative constructor, uintN nargs, + JSPropertySpec *ps, JSFunctionSpec *fs, + JSPropertySpec *static_ps, JSFunctionSpec *static_fs) +{ + CHECK_REQUEST(cx); + assertSameCompartment(cx, obj, parent_proto); + return js_InitClass(cx, obj, parent_proto, Valueify(clasp), constructor, + nargs, ps, fs, static_ps, static_fs, NULL, + JSFunction::ExtendedFinalizeKind); +} + +JS_FRIEND_API(const Value &) +js::GetFunctionNativeReserved(JSObject *fun, size_t which) +{ + JS_ASSERT(fun->toFunction()->isNative()); + return fun->toFunction()->getExtendedSlot(which); } JS_FRIEND_API(void) -js::CheckSlot(const JSObject *obj, size_t slot) +js::SetFunctionNativeReserved(JSObject *fun, size_t which, const Value &val) { - JS_ASSERT(slot < obj->numSlots()); + JS_ASSERT(fun->toFunction()->isNative()); + fun->toFunction()->setExtendedSlot(which, val); } -#endif /* * The below code is for temporary telemetry use. It can be removed when diff --git a/js/src/jsfriendapi.h b/js/src/jsfriendapi.h index 2c0a687f851a..7d67f5f25ec4 100644 --- a/js/src/jsfriendapi.h +++ b/js/src/jsfriendapi.h @@ -113,7 +113,6 @@ typedef struct TypeInferenceMemoryStats int64 objects; int64 tables; int64 temporary; - int64 emptyShapes; } TypeInferenceMemoryStats; extern JS_FRIEND_API(void) @@ -199,6 +198,12 @@ JS_FRIEND_API(JSBool) obj_defineGetter(JSContext *cx, uintN argc, js::Value *vp) JS_FRIEND_API(JSBool) obj_defineSetter(JSContext *cx, uintN argc, js::Value *vp); #endif +extern JS_FRIEND_API(size_t) +GetObjectDynamicSlotSize(JSObject *obj, JSMallocSizeOfFun mallocSizeOf); + +extern JS_FRIEND_API(size_t) +GetCompartmentShapeTableSize(JSCompartment *c, JSMallocSizeOfFun mallocSizeOf); + /* * Check whether it is OK to assign an undeclared property with name * propname of the global object in the current script on cx. 
Reports @@ -244,24 +249,34 @@ struct TypeObject { JSObject *proto; }; -struct Object { - void *_1; +struct BaseShape { js::Class *clasp; - uint32 flags; - uint32 objShape; - void *_2; JSObject *parent; - void *privateData; - jsuword capacity; - js::Value *slots; - TypeObject *type; +}; + +struct Shape { + BaseShape *base; + jsid _1; + uint32 slotInfo; static const uint32 FIXED_SLOTS_SHIFT = 27; +}; + +struct Object { + Shape *shape; + TypeObject *type; + js::Value *slots; + js::Value *_1; + + size_t numFixedSlots() const { return shape->slotInfo >> Shape::FIXED_SLOTS_SHIFT; } + Value *fixedSlots() const { + return (Value *)((jsuword) this + sizeof(shadow::Object)); + } js::Value &slotRef(size_t slot) const { - size_t nfixed = flags >> FIXED_SLOTS_SHIFT; + size_t nfixed = numFixedSlots(); if (slot < nfixed) - return ((Value *)((jsuword) this + sizeof(shadow::Object)))[slot]; + return fixedSlots()[slot]; return slots[slot - nfixed]; } }; @@ -284,7 +299,7 @@ extern JS_FRIEND_DATA(js::Class) XMLClass; inline js::Class * GetObjectClass(const JSObject *obj) { - return reinterpret_cast(obj)->clasp; + return reinterpret_cast(obj)->shape->base->clasp; } inline JSClass * @@ -293,12 +308,49 @@ GetObjectJSClass(const JSObject *obj) return js::Jsvalify(GetObjectClass(obj)); } +JS_FRIEND_API(bool) +IsScopeObject(const JSObject *obj); + inline JSObject * GetObjectParent(const JSObject *obj) { - return reinterpret_cast(obj)->parent; + JS_ASSERT(!IsScopeObject(obj)); + return reinterpret_cast(obj)->shape->base->parent; } +JS_FRIEND_API(JSObject *) +GetObjectParentMaybeScope(const JSObject *obj); + +JS_FRIEND_API(JSObject *) +GetGlobalForObjectCrossCompartment(JSObject *obj); + +JS_FRIEND_API(bool) +IsOriginalScriptFunction(JSFunction *fun); + +JS_FRIEND_API(JSFunction *) +DefineFunctionWithReserved(JSContext *cx, JSObject *obj, const char *name, JSNative call, + uintN nargs, uintN attrs); + +JS_FRIEND_API(JSFunction *) +NewFunctionWithReserved(JSContext *cx, JSNative call, uintN nargs, uintN flags, + JSObject *parent, const char *name); + +JS_FRIEND_API(JSFunction *) +NewFunctionByIdWithReserved(JSContext *cx, JSNative native, uintN nargs, uintN flags, + JSObject *parent, jsid id); + +JS_FRIEND_API(JSObject *) +InitClassWithReserved(JSContext *cx, JSObject *obj, JSObject *parent_proto, + JSClass *clasp, JSNative constructor, uintN nargs, + JSPropertySpec *ps, JSFunctionSpec *fs, + JSPropertySpec *static_ps, JSFunctionSpec *static_fs); + +JS_FRIEND_API(const Value &) +GetFunctionNativeReserved(JSObject *fun, size_t which); + +JS_FRIEND_API(void) +SetFunctionNativeReserved(JSObject *fun, size_t which, const Value &val); + inline JSObject * GetObjectProto(const JSObject *obj) { @@ -308,25 +360,11 @@ GetObjectProto(const JSObject *obj) inline void * GetObjectPrivate(const JSObject *obj) { - return reinterpret_cast(obj)->privateData; + const shadow::Object *nobj = reinterpret_cast(obj); + void **addr = reinterpret_cast(&nobj->fixedSlots()[nobj->numFixedSlots()]); + return *addr; } -inline JSObject * -GetObjectGlobal(JSObject *obj) -{ - while (JSObject *parent = GetObjectParent(obj)) - obj = parent; - return obj; -} - -#ifdef DEBUG -extern JS_FRIEND_API(void) CheckReservedSlot(const JSObject *obj, size_t slot); -extern JS_FRIEND_API(void) CheckSlot(const JSObject *obj, size_t slot); -#else -inline void CheckReservedSlot(const JSObject *obj, size_t slot) {} -inline void CheckSlot(const JSObject *obj, size_t slot) {} -#endif - /* * Get a slot that is both reserved for object's clasp *and* is fixed (fits * within 
the maximum capacity for the object's fixed slots). @@ -334,34 +372,32 @@ inline void CheckSlot(const JSObject *obj, size_t slot) {} inline const Value & GetReservedSlot(const JSObject *obj, size_t slot) { - CheckReservedSlot(obj, slot); + JS_ASSERT(slot < JSCLASS_RESERVED_SLOTS(GetObjectClass(obj))); return reinterpret_cast(obj)->slotRef(slot); } inline void SetReservedSlot(JSObject *obj, size_t slot, const Value &value) { - CheckReservedSlot(obj, slot); + JS_ASSERT(slot < JSCLASS_RESERVED_SLOTS(GetObjectClass(obj))); reinterpret_cast(obj)->slotRef(slot) = value; } -inline uint32 -GetNumSlots(const JSObject *obj) -{ - return uint32(reinterpret_cast(obj)->capacity); -} +JS_FRIEND_API(uint32) +GetObjectSlotSpan(const JSObject *obj); inline const Value & -GetSlot(const JSObject *obj, size_t slot) +GetObjectSlot(const JSObject *obj, size_t slot) { - CheckSlot(obj, slot); + JS_ASSERT(slot < GetObjectSlotSpan(obj)); return reinterpret_cast(obj)->slotRef(slot); } -inline uint32 +inline Shape * GetObjectShape(const JSObject *obj) { - return reinterpret_cast(obj)->objShape; + shadow::Shape *shape = reinterpret_cast(obj)->shape; + return reinterpret_cast(shape); } static inline js::PropertyOp diff --git a/js/src/jsfun.cpp b/js/src/jsfun.cpp index 3b0e74bf12ae..db4d25f5ebfc 100644 --- a/js/src/jsfun.cpp +++ b/js/src/jsfun.cpp @@ -123,7 +123,7 @@ js_GetArgsValue(JSContext *cx, StackFrame *fp, Value *vp) } js::ArgumentsObject * -ArgumentsObject::create(JSContext *cx, uint32 argc, JSObject &callee) +ArgumentsObject::create(JSContext *cx, uint32 argc, JSObject &callee, StackFrame *fp) { JS_ASSERT(argc <= StackSpace::ARGS_LENGTH_MAX); @@ -135,13 +135,12 @@ ArgumentsObject::create(JSContext *cx, uint32 argc, JSObject &callee) if (!type) return NULL; - JS_STATIC_ASSERT(NormalArgumentsObject::RESERVED_SLOTS == 2); - JS_STATIC_ASSERT(StrictArgumentsObject::RESERVED_SLOTS == 2); - JSObject *obj = js_NewGCObject(cx, FINALIZE_OBJECT2); - if (!obj) - return NULL; - - EmptyShape *emptyArgumentsShape = EmptyShape::getEmptyArgumentsShape(cx); + bool strict = callee.toFunction()->inStrictMode(); + Class *clasp = strict ? &StrictArgumentsObjectClass : &NormalArgumentsObjectClass; + Shape *emptyArgumentsShape = + EmptyShape::getInitialShape(cx, clasp, proto, + proto->getParent(), FINALIZE_KIND, + BaseShape::INDEXED); if (!emptyArgumentsShape) return NULL; @@ -153,18 +152,20 @@ ArgumentsObject::create(JSContext *cx, uint32 argc, JSObject &callee) data->callee.init(ObjectValue(callee)); InitValueRange(data->slots, argc, false); - /* Can't fail from here on, so initialize everything in argsobj. */ - obj->init(cx, callee.getFunctionPrivate()->inStrictMode() - ? &StrictArgumentsObjectClass - : &NormalArgumentsObjectClass, - type, proto->getParent(), NULL, false); - obj->initMap(emptyArgumentsShape); + /* We have everything needed to fill in the object, so make the object. */ + JSObject *obj = JSObject::create(cx, FINALIZE_KIND, emptyArgumentsShape, type, NULL); + if (!obj) + return NULL; ArgumentsObject *argsobj = obj->asArguments(); JS_ASSERT(UINT32_MAX > (uint64(argc) << PACKED_BITS_COUNT)); argsobj->initInitialLength(argc); argsobj->initData(data); + argsobj->setStackFrame(strict ? 
NULL : fp); + + JS_ASSERT(argsobj->numFixedSlots() >= NormalArgumentsObject::RESERVED_SLOTS); + JS_ASSERT(argsobj->numFixedSlots() >= StrictArgumentsObject::RESERVED_SLOTS); return argsobj; } @@ -207,7 +208,7 @@ js_GetArgsObject(JSContext *cx, StackFrame *fp) return &fp->argsObj(); ArgumentsObject *argsobj = - ArgumentsObject::create(cx, fp->numActualArgs(), fp->callee()); + ArgumentsObject::create(cx, fp->numActualArgs(), fp->callee(), fp); if (!argsobj) return argsobj; @@ -522,24 +523,6 @@ args_finalize(JSContext *cx, JSObject *obj) cx->free_(reinterpret_cast(obj->asArguments()->data())); } -/* - * If a generator's arguments or call object escapes, and the generator frame - * is not executing, the generator object needs to be marked because it is not - * otherwise reachable. An executing generator is rooted by its invocation. To - * distinguish the two cases (which imply different access paths to the - * generator object), we use the JSFRAME_FLOATING_GENERATOR flag, which is only - * set on the StackFrame kept in the generator object's JSGenerator. - */ -static inline void -MaybeMarkGenerator(JSTracer *trc, JSObject *obj) -{ -#if JS_HAS_GENERATORS - StackFrame *fp = (StackFrame *) obj->getPrivate(); - if (fp && fp->isFloatingGenerator()) - MarkObject(trc, js_FloatingFrameToGenerator(fp)->obj, "generator object"); -#endif -} - static void args_trace(JSTracer *trc, JSObject *obj) { @@ -548,7 +531,20 @@ args_trace(JSTracer *trc, JSObject *obj) MarkValue(trc, data->callee, js_callee_str); MarkValueRange(trc, argsobj->initialLength(), data->slots, js_arguments_str); - MaybeMarkGenerator(trc, argsobj); + /* + * If a generator's arguments or call object escapes, and the generator + * frame is not executing, the generator object needs to be marked because + * it is not otherwise reachable. An executing generator is rooted by its + * invocation. To distinguish the two cases (which imply different access + * paths to the generator object), we use the JSFRAME_FLOATING_GENERATOR + * flag, which is only set on the StackFrame kept in the generator object's + * JSGenerator. 
+ */ +#if JS_HAS_GENERATORS + StackFrame *fp = argsobj->maybeStackFrame(); + if (fp && fp->isFloatingGenerator()) + MarkObject(trc, js_FloatingFrameToGenerator(fp)->obj, "generator object"); +#endif } /* @@ -559,7 +555,7 @@ args_trace(JSTracer *trc, JSObject *obj) */ Class js::NormalArgumentsObjectClass = { "Arguments", - JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE | + JSCLASS_NEW_RESOLVE | JSCLASS_HAS_RESERVED_SLOTS(NormalArgumentsObject::RESERVED_SLOTS) | JSCLASS_HAS_CACHED_PROTO(JSProto_Object), JS_PropertyStub, /* addProperty */ @@ -586,7 +582,7 @@ Class js::NormalArgumentsObjectClass = { */ Class js::StrictArgumentsObjectClass = { "Arguments", - JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE | + JSCLASS_NEW_RESOLVE | JSCLASS_HAS_RESERVED_SLOTS(StrictArgumentsObject::RESERVED_SLOTS) | JSCLASS_HAS_CACHED_PROTO(JSProto_Object), JS_PropertyStub, /* addProperty */ @@ -612,7 +608,9 @@ Class js::StrictArgumentsObjectClass = { */ Class js::DeclEnvClass = { js_Object_str, - JSCLASS_HAS_PRIVATE | JSCLASS_HAS_CACHED_PROTO(JSProto_Object), + JSCLASS_HAS_PRIVATE | + JSCLASS_HAS_RESERVED_SLOTS(CallObject::DECL_ENV_RESERVED_SLOTS) | + JSCLASS_HAS_CACHED_PROTO(JSProto_Object), JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ JS_PropertyStub, /* getProperty */ @@ -625,15 +623,25 @@ Class js::DeclEnvClass = { static inline JSObject * NewDeclEnvObject(JSContext *cx, StackFrame *fp) { - JSObject *envobj = js_NewGCObject(cx, FINALIZE_OBJECT2); - if (!envobj) + types::TypeObject *type = cx->compartment->getEmptyType(cx); + if (!type) return NULL; - EmptyShape *emptyDeclEnvShape = EmptyShape::getEmptyDeclEnvShape(cx); + JSObject *parent = fp->scopeChain().getGlobal(); + Shape *emptyDeclEnvShape = + EmptyShape::getInitialShape(cx, &DeclEnvClass, NULL, + parent, CallObject::DECL_ENV_FINALIZE_KIND); if (!emptyDeclEnvShape) return NULL; - envobj->init(cx, &DeclEnvClass, &emptyTypeObject, &fp->scopeChain(), fp, false); - envobj->initMap(emptyDeclEnvShape); + + JSObject *envobj = JSObject::create(cx, CallObject::DECL_ENV_FINALIZE_KIND, + emptyDeclEnvShape, type, NULL); + if (!envobj) + return NULL; + envobj->setPrivate(fp); + + if (!envobj->setInternalScopeChain(cx, &fp->scopeChain())) + return NULL; return envobj; } @@ -724,13 +732,11 @@ js_PutCallObject(StackFrame *fp) callobj.copyValues(0, NULL, bindings.countVars(), fp->slots()); } else { JSFunction *fun = fp->fun(); - JS_ASSERT(fun == callobj.getCalleeFunction()); + JS_ASSERT(script == callobj.getCalleeFunction()->script()); JS_ASSERT(script == fun->script()); uintN n = bindings.countArgsAndVars(); if (n > 0) { - JS_ASSERT(CallObject::RESERVED_SLOTS + n <= callobj.numSlots()); - uint32 nvars = bindings.countVars(); uint32 nargs = bindings.countArgs(); JS_ASSERT(fun->nargs == nargs); @@ -785,7 +791,7 @@ js_PutCallObject(StackFrame *fp) /* Clear private pointers to fp, which is about to go away. 
*/ if (js_IsNamedLambda(fun)) { - JSObject *env = callobj.getParent(); + JSObject *env = callobj.internalScopeChain(); JS_ASSERT(env->isDeclEnv()); JS_ASSERT(env->getPrivate() == fp); @@ -854,7 +860,7 @@ SetCallArg(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *vp) JSFunction *fun = callobj.getCalleeFunction(); JSScript *script = fun->script(); - if (!script->ensureHasTypes(cx, fun)) + if (!script->ensureHasTypes(cx)) return false; TypeScript::SetArgument(cx, script, i, *vp); @@ -869,7 +875,7 @@ GetCallUpvar(JSContext *cx, JSObject *obj, jsid id, Value *vp) JS_ASSERT((int16) JSID_TO_INT(id) == JSID_TO_INT(id)); uintN i = (uint16) JSID_TO_INT(id); - *vp = callobj.getCallee()->getFlatClosureUpvar(i); + *vp = callobj.getCallee()->toFunction()->getFlatClosureUpvar(i); return true; } @@ -880,7 +886,7 @@ SetCallUpvar(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *vp) JS_ASSERT((int16) JSID_TO_INT(id) == JSID_TO_INT(id)); uintN i = (uint16) JSID_TO_INT(id); - callobj.getCallee()->setFlatClosureUpvar(i, *vp); + callobj.getCallee()->toFunction()->setFlatClosureUpvar(i, *vp); return true; } @@ -913,7 +919,7 @@ SetCallVar(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *vp) JSFunction *fun = callobj.getCalleeFunction(); JSScript *script = fun->script(); - if (!script->ensureHasTypes(cx, fun)) + if (!script->ensureHasTypes(cx)) return false; TypeScript::SetLocal(cx, script, i, *vp); @@ -934,7 +940,7 @@ call_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags, JSObject **objp JSObject *callee = obj->asCall().getCallee(); #ifdef DEBUG if (callee) { - JSScript *script = callee->getFunctionPrivate()->script(); + JSScript *script = callee->toFunction()->script(); JS_ASSERT(!script->bindings.hasBinding(cx, JSID_TO_ATOM(id))); } #endif @@ -967,7 +973,12 @@ call_trace(JSTracer *trc, JSObject *obj) { JS_ASSERT(obj->isCall()); - MaybeMarkGenerator(trc, obj); + /* Mark any generator frame, as for arguments objects. */ +#if JS_HAS_GENERATORS + StackFrame *fp = (StackFrame *) obj->getPrivate(); + if (fp && fp->isFloatingGenerator()) + MarkObject(trc, js_FloatingFrameToGenerator(fp)->obj, "generator object"); +#endif } JS_PUBLIC_DATA(Class) js::CallClass = { @@ -1000,9 +1011,8 @@ StackFrame::getValidCalleeObject(JSContext *cx, Value *vp) return true; } - JSFunction *fun = this->fun(); - JSObject &funobj = callee(); - vp->setObject(funobj); + JSFunction *fun = this->callee().toFunction(); + vp->setObject(*fun); /* * Check for an escape attempt by a joined function object, which must go @@ -1010,90 +1020,84 @@ StackFrame::getValidCalleeObject(JSContext *cx, Value *vp) * atom by which it was uniquely associated with a property. */ const Value &thisv = functionThis(); - if (thisv.isObject()) { - JS_ASSERT(funobj.getFunctionPrivate() == fun); + if (thisv.isObject() && fun->methodAtom() && !fun->isClonedMethod()) { + JSObject *thisp = &thisv.toObject(); + JSObject *first_barriered_thisp = NULL; - if (fun->compiledFunObj() == funobj && fun->methodAtom()) { - JSObject *thisp = &thisv.toObject(); - JSObject *first_barriered_thisp = NULL; + do { + /* + * While a non-native object is responsible for handling its + * entire prototype chain, notable non-natives including dense + * and typed arrays have native prototypes, so keep going. 
+ */ + if (!thisp->isNative()) + continue; - do { + const Shape *shape = thisp->nativeLookup(cx, ATOM_TO_JSID(fun->methodAtom())); + if (shape) { /* - * While a non-native object is responsible for handling its - * entire prototype chain, notable non-natives including dense - * and typed arrays have native prototypes, so keep going. + * Two cases follow: the method barrier was not crossed + * yet, so we cross it here; the method barrier *was* + * crossed but after the call, in which case we fetch + * and validate the cloned (unjoined) funobj from the + * method property's slot. + * + * In either case we must allow for the method property + * to have been replaced, or its value overwritten. */ - if (!thisp->isNative()) - continue; - - if (thisp->hasMethodBarrier()) { - const Shape *shape = thisp->nativeLookup(cx, ATOM_TO_JSID(fun->methodAtom())); - if (shape) { - /* - * Two cases follow: the method barrier was not crossed - * yet, so we cross it here; the method barrier *was* - * crossed but after the call, in which case we fetch - * and validate the cloned (unjoined) funobj from the - * method property's slot. - * - * In either case we must allow for the method property - * to have been replaced, or its value overwritten. - */ - if (shape->isMethod() && shape->methodObject() == funobj) { - if (!thisp->methodReadBarrier(cx, *shape, vp)) - return false; - overwriteCallee(vp->toObject()); - return true; - } - - if (shape->hasSlot()) { - Value v = thisp->getSlot(shape->slot); - JSObject *clone; - - if (IsFunctionObject(v, &clone) && - clone->getFunctionPrivate() == fun && - clone->hasMethodObj(*thisp)) { - /* - * N.B. If the method barrier was on a function - * with singleton type, then while crossing the - * method barrier CloneFunctionObject will have - * ignored the attempt to clone the function. - */ - JS_ASSERT_IF(!clone->hasSingletonType(), clone != &funobj); - *vp = v; - overwriteCallee(*clone); - return true; - } - } - } - - if (!first_barriered_thisp) - first_barriered_thisp = thisp; + if (shape->isMethod() && thisp->nativeGetMethod(shape) == fun) { + if (!thisp->methodReadBarrier(cx, *shape, vp)) + return false; + overwriteCallee(vp->toObject()); + return true; } - } while ((thisp = thisp->getProto()) != NULL); + + if (shape->hasSlot()) { + Value v = thisp->getSlot(shape->slot()); + JSFunction *clone; + + if (IsFunctionObject(v, &clone) && + clone->script() == fun->script() && + clone->methodObj() == thisp) { + /* + * N.B. If the method barrier was on a function + * with singleton type, then while crossing the + * method barrier CloneFunctionObject will have + * ignored the attempt to clone the function. + */ + JS_ASSERT_IF(!clone->hasSingletonType(), clone != fun); + *vp = v; + overwriteCallee(*clone); + return true; + } + } + } if (!first_barriered_thisp) - return true; + first_barriered_thisp = thisp; + } while ((thisp = thisp->getProto()) != NULL); - /* - * At this point, we couldn't find an already-existing clone (or - * force to exist a fresh clone) created via thisp's method read - * barrier, so we must clone fun and store it in fp's callee to - * avoid re-cloning upon repeated foo.caller access. - * - * This must mean the code in js_DeleteProperty could not find this - * stack frame on the stack when the method was deleted. We've lost - * track of the method, so we associate it with the first barriered - * object found starting from thisp on the prototype chain. 
- */ - JSObject *newfunobj = CloneFunctionObject(cx, fun); - if (!newfunobj) - return false; - newfunobj->setMethodObj(*first_barriered_thisp); - overwriteCallee(*newfunobj); - vp->setObject(*newfunobj); + if (!first_barriered_thisp) return true; - } + + /* + * At this point, we couldn't find an already-existing clone (or + * force to exist a fresh clone) created via thisp's method read + * barrier, so we must clone fun and store it in fp's callee to + * avoid re-cloning upon repeated foo.caller access. + * + * This must mean the code in js_DeleteProperty could not find this + * stack frame on the stack when the method was deleted. We've lost + * track of the method, so we associate it with the first barriered + * object found starting from thisp on the prototype chain. + */ + JSFunction *newfunobj = CloneFunctionObject(cx, fun); + if (!newfunobj) + return false; + newfunobj->setMethodObj(*first_barriered_thisp); + overwriteCallee(*newfunobj); + vp->setObject(*newfunobj); + return true; } return true; @@ -1107,7 +1111,7 @@ fun_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp) if (!obj) return true; } - JSFunction *fun = obj->getFunctionPrivate(); + JSFunction *fun = obj->toFunction(); /* * Mark the function's script as uninlineable, to expand any of its @@ -1179,7 +1183,7 @@ fun_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp) if (caller.compartment() != cx->compartment) { vp->setNull(); } else if (caller.isFunction()) { - JSFunction *callerFun = caller.getFunctionPrivate(); + JSFunction *callerFun = caller.toFunction(); if (callerFun->isInterpreted() && callerFun->inStrictMode()) { JS_ReportErrorFlagsAndNumber(cx, JSREPORT_ERROR, js_GetErrorMessage, NULL, JSMSG_CALLER_IS_STRICT); @@ -1239,7 +1243,7 @@ static JSObject * ResolveInterpretedFunctionPrototype(JSContext *cx, JSObject *obj) { #ifdef DEBUG - JSFunction *fun = obj->getFunctionPrivate(); + JSFunction *fun = obj->toFunction(); JS_ASSERT(fun->isInterpreted()); JS_ASSERT(!fun->isFunctionPrototype()); #endif @@ -1256,11 +1260,10 @@ ResolveInterpretedFunctionPrototype(JSContext *cx, JSObject *obj) * Make the prototype object an instance of Object with the same parent * as the function object itself. 
*/ - JSObject *parent = obj->getParent(); JSObject *objProto; - if (!js_GetClassPrototype(cx, parent, JSProto_Object, &objProto)) + if (!js_GetClassPrototype(cx, obj->getParent(), JSProto_Object, &objProto)) return NULL; - JSObject *proto = NewNativeClassInstance(cx, &ObjectClass, objProto, parent); + JSObject *proto = NewObjectWithGivenProto(cx, &ObjectClass, objProto, NULL); if (!proto || !proto->setSingletonType(cx)) return NULL; @@ -1289,7 +1292,7 @@ fun_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags, if (!JSID_IS_ATOM(id)) return true; - JSFunction *fun = obj->getFunctionPrivate(); + JSFunction *fun = obj->toFunction(); if (JSID_IS_ATOM(id, cx->runtime->atomState.classPrototypeAtom)) { /* @@ -1379,7 +1382,7 @@ js_XDRFunctionObject(JSXDRState *xdr, JSObject **objp) cx = xdr->cx; JSScript *script; if (xdr->mode == JSXDR_ENCODE) { - fun = (*objp)->getFunctionPrivate(); + fun = (*objp)->toFunction(); if (!fun->isInterpreted()) { JSAutoByteString funNameBytes; if (const char *name = GetFunctionNameBytes(cx, fun, &funNameBytes)) { @@ -1388,15 +1391,17 @@ js_XDRFunctionObject(JSXDRState *xdr, JSObject **objp) } return false; } - firstword = (fun->u.i.skipmin << 2) | !!fun->atom; + firstword = !!fun->atom; flagsword = (fun->nargs << 16) | fun->flags; script = fun->script(); } else { fun = js_NewFunction(cx, NULL, NULL, 0, JSFUN_INTERPRETED, NULL, NULL); if (!fun) return false; - fun->clearParent(); - fun->clearType(); + if (!fun->clearParent(cx)) + return false; + if (!fun->clearType(cx)) + return false; script = NULL; } @@ -1416,7 +1421,6 @@ js_XDRFunctionObject(JSXDRState *xdr, JSObject **objp) fun->nargs = flagsword >> 16; JS_ASSERT((flagsword & JSFUN_KINDMASK) >= JSFUN_INTERPRETED); fun->flags = uint16(flagsword); - fun->u.i.skipmin = uint16(firstword >> 2); fun->setScript(script); if (!script->typeSetFunction(cx, fun)) return false; @@ -1445,7 +1449,7 @@ fun_hasInstance(JSContext *cx, JSObject *obj, const Value *v, JSBool *bp) while (obj->isFunction()) { if (!obj->isBoundFunction()) break; - obj = obj->getBoundFunctionTarget(); + obj = obj->toFunction()->getBoundFunctionTarget(); } Value pval; @@ -1465,42 +1469,41 @@ fun_hasInstance(JSContext *cx, JSObject *obj, const Value *v, JSBool *bp) return JS_TRUE; } +inline void +JSFunction::trace(JSTracer *trc) +{ + if (isFlatClosure() && hasFlatClosureUpvars()) { + if (HeapValue *upvars = getFlatClosureUpvars()) + MarkValueRange(trc, script()->bindings.countUpvars(), upvars, "upvars"); + } + + if (isExtended()) { + MarkValueRange(trc, ArrayLength(toExtended()->extendedSlots), + toExtended()->extendedSlots, "nativeReserved"); + } + + if (atom) + MarkAtom(trc, atom, "atom"); + + if (isInterpreted()) { + if (script()) + MarkScript(trc, script(), "script"); + if (environment()) + MarkObjectUnbarriered(trc, environment(), "fun_callscope"); + } +} + static void fun_trace(JSTracer *trc, JSObject *obj) { - /* A newborn function object may have a not yet initialized private slot. */ - JSFunction *fun = (JSFunction *) obj->getPrivate(); - if (!fun) - return; - - if (fun != obj) { - /* - * obj is a cloned function object, trace the clone-parent, fun. - * This is safe to leave Unbarriered for incremental GC because any - * change to fun will trigger a setPrivate barrer. But we'll need to - * fix this for generational GC. - */ - MarkObjectUnbarriered(trc, fun, "private"); - - /* The function could be a flat closure with upvar copies in the clone. 
*/ - if (fun->isFlatClosure() && fun->script()->bindings.hasUpvars()) { - MarkValueRange(trc, fun->script()->bindings.countUpvars(), - obj->getFlatClosureData()->upvars, "upvars"); - } - return; - } - - if (fun->atom) - MarkAtom(trc, fun->atom, "atom"); - - if (fun->isInterpreted() && fun->script()) - MarkScript(trc, fun->script(), "script"); + obj->toFunction()->trace(trc); } static void fun_finalize(JSContext *cx, JSObject *obj) { - obj->finalizeUpvarsIfFlatClosure(); + if (obj->toFunction()->isFlatClosure()) + obj->toFunction()->finalizeUpvars(); } /* @@ -1510,10 +1513,8 @@ fun_finalize(JSContext *cx, JSObject *obj) */ JS_FRIEND_DATA(Class) js::FunctionClass = { js_Function_str, - JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE | - JSCLASS_HAS_RESERVED_SLOTS(JSFunction::CLASS_RESERVED_SLOTS) | - JSCLASS_HAS_CACHED_PROTO(JSProto_Function) | - JSCLASS_CONCURRENT_FINALIZER, + JSCLASS_NEW_RESOLVE | + JSCLASS_HAS_CACHED_PROTO(JSProto_Function), JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ JS_PropertyStub, /* getProperty */ @@ -1544,7 +1545,7 @@ fun_toStringHelper(JSContext *cx, JSObject *obj, uintN indent) return NULL; } - JSFunction *fun = obj->getFunctionPrivate(); + JSFunction *fun = obj->toFunction(); if (!fun) return NULL; @@ -1714,35 +1715,40 @@ CallOrConstructBoundFunction(JSContext *cx, uintN argc, Value *vp); } +static const uint32 JSSLOT_BOUND_FUNCTION_THIS = 0; +static const uint32 JSSLOT_BOUND_FUNCTION_ARGS_COUNT = 1; + +static const uint32 BOUND_FUNCTION_RESERVED_SLOTS = 2; + inline bool -JSObject::initBoundFunction(JSContext *cx, const Value &thisArg, - const Value *args, uintN argslen) +JSFunction::initBoundFunction(JSContext *cx, const Value &thisArg, + const Value *args, uintN argslen) { JS_ASSERT(isFunction()); - flags |= JSObject::BOUND_FUNCTION; + /* + * Convert to a dictionary to set the BOUND_FUNCTION flag and increase + * the slot span to cover the arguments and additional slots for the 'this' + * value and arguments count. + */ + if (!toDictionaryMode(cx)) + return false; + + lastProperty()->base()->setObjectFlag(BaseShape::BOUND_FUNCTION); + + if (!setSlotSpan(cx, BOUND_FUNCTION_RESERVED_SLOTS + argslen)) + return false; + setSlot(JSSLOT_BOUND_FUNCTION_THIS, thisArg); setSlot(JSSLOT_BOUND_FUNCTION_ARGS_COUNT, PrivateUint32Value(argslen)); - if (argslen != 0) { - /* FIXME? Burn memory on an empty scope whose shape covers the args slots. 
*/ - EmptyShape *empty = EmptyShape::create(cx, getClass()); - if (!empty) - return false; - empty->slotSpan += argslen; - setMap(empty); + copySlotRange(BOUND_FUNCTION_RESERVED_SLOTS, args, argslen, false); - if (!ensureInstanceReservedSlots(cx, argslen)) - return false; - - JS_ASSERT(numSlots() >= argslen + FUN_CLASS_RESERVED_SLOTS); - copySlotRange(FUN_CLASS_RESERVED_SLOTS, args, argslen, false); - } return true; } inline JSObject * -JSObject::getBoundFunctionTarget() const +JSFunction::getBoundFunctionTarget() const { JS_ASSERT(isFunction()); JS_ASSERT(isBoundFunction()); @@ -1752,7 +1758,7 @@ JSObject::getBoundFunctionTarget() const } inline const js::Value & -JSObject::getBoundFunctionThis() const +JSFunction::getBoundFunctionThis() const { JS_ASSERT(isFunction()); JS_ASSERT(isBoundFunction()); @@ -1761,17 +1767,17 @@ JSObject::getBoundFunctionThis() const } inline const js::Value & -JSObject::getBoundFunctionArgument(uintN which) const +JSFunction::getBoundFunctionArgument(uintN which) const { JS_ASSERT(isFunction()); JS_ASSERT(isBoundFunction()); JS_ASSERT(which < getBoundFunctionArgumentCount()); - return getSlot(FUN_CLASS_RESERVED_SLOTS + which); + return getSlot(BOUND_FUNCTION_RESERVED_SLOTS + which); } inline size_t -JSObject::getBoundFunctionArgumentCount() const +JSFunction::getBoundFunctionArgumentCount() const { JS_ASSERT(isFunction()); JS_ASSERT(isBoundFunction()); @@ -1785,14 +1791,13 @@ namespace js { JSBool CallOrConstructBoundFunction(JSContext *cx, uintN argc, Value *vp) { - JSObject *obj = &vp[0].toObject(); - JS_ASSERT(obj->isFunction()); - JS_ASSERT(obj->isBoundFunction()); + JSFunction *fun = vp[0].toObject().toFunction(); + JS_ASSERT(fun->isBoundFunction()); bool constructing = IsConstructing(vp); /* 15.3.4.5.1 step 1, 15.3.4.5.2 step 3. */ - uintN argslen = obj->getBoundFunctionArgumentCount(); + uintN argslen = fun->getBoundFunctionArgumentCount(); if (argc + argslen > StackSpace::ARGS_LENGTH_MAX) { js_ReportAllocationOverflow(cx); @@ -1800,10 +1805,10 @@ CallOrConstructBoundFunction(JSContext *cx, uintN argc, Value *vp) } /* 15.3.4.5.1 step 3, 15.3.4.5.2 step 1. */ - JSObject *target = obj->getBoundFunctionTarget(); + JSObject *target = fun->getBoundFunctionTarget(); /* 15.3.4.5.1 step 2. */ - const Value &boundThis = obj->getBoundFunctionThis(); + const Value &boundThis = fun->getBoundFunctionThis(); InvokeArgsGuard args; if (!cx->stack.pushInvokeArgs(cx, argc + argslen, &args)) @@ -1811,7 +1816,7 @@ CallOrConstructBoundFunction(JSContext *cx, uintN argc, Value *vp) /* 15.3.4.5.1, 15.3.4.5.2 step 4. */ for (uintN i = 0; i < argslen; i++) - args[i] = obj->getBoundFunctionArgument(i); + args[i] = fun->getBoundFunctionArgument(i); memcpy(args.array() + argslen, vp + 2, argc * sizeof(Value)); /* 15.3.4.5.1, 15.3.4.5.2 step 5. */ @@ -1833,14 +1838,12 @@ CallOrConstructBoundFunction(JSContext *cx, uintN argc, Value *vp) static JSBool fun_isGenerator(JSContext *cx, uintN argc, Value *vp) { - JSObject *funobj; - if (!IsFunctionObject(vp[1], &funobj)) { + JSFunction *fun; + if (!IsFunctionObject(vp[1], &fun)) { JS_SET_RVAL(cx, vp, BooleanValue(false)); return true; } - JSFunction *fun = funobj->getFunctionPrivate(); - bool result = false; if (fun->isInterpreted()) { JSScript *script = fun->script(); @@ -1881,24 +1884,27 @@ fun_bind(JSContext *cx, uintN argc, Value *vp) /* Steps 15-16. 
*/ uintN length = 0; if (target->isFunction()) { - uintN nargs = target->getFunctionPrivate()->nargs; + uintN nargs = target->toFunction()->nargs; if (nargs > argslen) length = nargs - argslen; } /* Step 4-6, 10-11. */ - JSAtom *name = target->isFunction() ? target->getFunctionPrivate()->atom : NULL; + JSAtom *name = target->isFunction() ? target->toFunction()->atom : NULL; - /* NB: Bound functions abuse |parent| to store their target. */ JSObject *funobj = js_NewFunction(cx, NULL, CallOrConstructBoundFunction, length, JSFUN_CONSTRUCTOR, target, name); if (!funobj) return false; + /* NB: Bound functions abuse |parent| to store their target. */ + if (!funobj->setParent(cx, target)) + return false; + /* Steps 7-9. */ Value thisArg = args.length() >= 1 ? args[0] : UndefinedValue(); - if (!funobj->initBoundFunction(cx, thisArg, boundArgs, argslen)) + if (!funobj->toFunction()->initBoundFunction(cx, thisArg, boundArgs, argslen)) return false; /* Steps 17, 19-21 are handled by fun_resolve. */ @@ -2117,7 +2123,7 @@ const Shape * LookupInterpretedFunctionPrototype(JSContext *cx, JSObject *funobj) { #ifdef DEBUG - JSFunction *fun = funobj->getFunctionPrivate(); + JSFunction *fun = funobj->toFunction(); JS_ASSERT(fun->isInterpreted()); JS_ASSERT(!fun->isFunctionPrototype()); JS_ASSERT(!funobj->isBoundFunction()); @@ -2141,21 +2147,22 @@ LookupInterpretedFunctionPrototype(JSContext *cx, JSObject *funobj) JSFunction * js_NewFunction(JSContext *cx, JSObject *funobj, Native native, uintN nargs, - uintN flags, JSObject *parent, JSAtom *atom) + uintN flags, JSObject *parent, JSAtom *atom, js::gc::AllocKind kind) { + JS_ASSERT(kind == JSFunction::FinalizeKind || kind == JSFunction::ExtendedFinalizeKind); + JS_ASSERT(sizeof(JSFunction) <= gc::Arena::thingSize(JSFunction::FinalizeKind)); + JS_ASSERT(sizeof(FunctionExtended) <= gc::Arena::thingSize(JSFunction::ExtendedFinalizeKind)); + JSFunction *fun; if (funobj) { JS_ASSERT(funobj->isFunction()); - funobj->setParent(parent); + JS_ASSERT(funobj->getParent() == parent); } else { - funobj = NewFunction(cx, parent); + funobj = NewObjectWithClassProto(cx, &FunctionClass, NULL, SkipScopeParent(parent), kind); if (!funobj) return NULL; - if (native && !funobj->setSingletonType(cx)) - return NULL; } - JS_ASSERT(!funobj->getPrivate()); fun = static_cast(funobj); /* Initialize all function members. */ @@ -2163,38 +2170,51 @@ js_NewFunction(JSContext *cx, JSObject *funobj, Native native, uintN nargs, fun->flags = flags & (JSFUN_FLAGS_MASK | JSFUN_KINDMASK); if ((flags & JSFUN_KINDMASK) >= JSFUN_INTERPRETED) { JS_ASSERT(!native); - JS_ASSERT(nargs == 0); - fun->u.i.skipmin = 0; fun->script().init(NULL); + fun->setEnvironment(parent); } else { fun->u.n.clasp = NULL; fun->u.n.native = native; JS_ASSERT(fun->u.n.native); } + if (kind == JSFunction::ExtendedFinalizeKind) { + fun->flags |= JSFUN_EXTENDED; + fun->initializeExtended(); + } fun->atom = atom; - /* Set private to self to indicate non-cloned fully initialized function. */ - fun->setPrivate(fun); + if (native && !fun->setSingletonType(cx)) + return NULL; + return fun; } -JSObject * JS_FASTCALL +JSFunction * JS_FASTCALL js_CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent, - JSObject *proto) + JSObject *proto, gc::AllocKind kind) { JS_ASSERT(parent); JS_ASSERT(proto); - JSObject *clone; - if (cx->compartment == fun->compartment()) { - /* - * The cloned function object does not need the extra JSFunction members - * beyond JSObject as it points to fun via the private slot. 
- */ - clone = NewNativeClassInstance(cx, &FunctionClass, proto, parent); - if (!clone) - return NULL; + JSObject *cloneobj = NewObjectWithClassProto(cx, &FunctionClass, NULL, SkipScopeParent(parent), kind); + if (!cloneobj) + return NULL; + JSFunction *clone = static_cast(cloneobj); + clone->nargs = fun->nargs; + clone->flags = fun->flags & ~JSFUN_EXTENDED; + clone->u = fun->toFunction()->u; + clone->atom = fun->atom; + + if (kind == JSFunction::ExtendedFinalizeKind) { + clone->flags |= JSFUN_EXTENDED; + clone->initializeExtended(); + } + + if (clone->isInterpreted()) + clone->setEnvironment(parent); + + if (cx->compartment == fun->compartment()) { /* * We can use the same type as the original function provided that (a) * its prototype is correct, and (b) its type is not a singleton. The @@ -2203,41 +2223,30 @@ js_CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent, * definitions or read barriers, so will not get here. */ if (fun->getProto() == proto && !fun->hasSingletonType()) - clone->initType(fun->type()); - - clone->setPrivate(fun); + clone->setType(fun->type()); } else { /* - * Across compartments we have to deep copy JSFunction and clone the - * script (for interpreted functions). + * Across compartments we have to clone the script for interpreted + * functions. */ - clone = NewFunction(cx, parent); - if (!clone) - return NULL; - - JSFunction *cfun = (JSFunction *) clone; - cfun->nargs = fun->nargs; - cfun->flags = fun->flags; - cfun->u = fun->getFunctionPrivate()->u; - cfun->atom = fun->atom; - clone->setPrivate(cfun); - if (cfun->isInterpreted()) { - JSScript *script = fun->script(); + if (clone->isInterpreted()) { + JSScript *script = clone->script(); JS_ASSERT(script); JS_ASSERT(script->compartment() == fun->compartment()); JS_ASSERT(script->compartment() != cx->compartment); - cfun->script().init(NULL); + clone->script().init(NULL); JSScript *cscript = js_CloneScript(cx, script); if (!cscript) return NULL; - cscript->globalObject = cfun->getGlobal(); - cfun->setScript(cscript); - if (!cscript->typeSetFunction(cx, cfun)) + + cscript->globalObject = clone->getGlobal(); + clone->setScript(cscript); + if (!cscript->typeSetFunction(cx, clone)) return NULL; - js_CallNewScriptHook(cx, cfun->script(), cfun); - Debugger::onNewScript(cx, cfun->script(), NULL); + js_CallNewScriptHook(cx, clone->script(), clone); + Debugger::onNewScript(cx, clone->script(), NULL); } } return clone; @@ -2248,7 +2257,7 @@ js_CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent, * values. The tracer calls this function and then initializes the upvar * slots on trace. 
*/ -JSObject * JS_FASTCALL +JSFunction * JS_FASTCALL js_AllocFlatClosure(JSContext *cx, JSFunction *fun, JSObject *scopeChain) { JS_ASSERT(fun->isFlatClosure()); @@ -2257,7 +2266,7 @@ js_AllocFlatClosure(JSContext *cx, JSFunction *fun, JSObject *scopeChain) JS_ASSERT_IF(JSScript::isValidOffset(fun->script()->upvarsOffset), fun->script()->upvars()->length == fun->script()->bindings.countUpvars()); - JSObject *closure = CloneFunctionObject(cx, fun, scopeChain, true); + JSFunction *closure = CloneFunctionObject(cx, fun, scopeChain, JSFunction::ExtendedFinalizeKind); if (!closure) return closure; @@ -2265,15 +2274,15 @@ js_AllocFlatClosure(JSContext *cx, JSFunction *fun, JSObject *scopeChain) if (nslots == 0) return closure; - FlatClosureData *data = (FlatClosureData *) cx->malloc_(nslots * sizeof(HeapValue)); + HeapValue *data = (HeapValue *) cx->malloc_(nslots * sizeof(HeapValue)); if (!data) return NULL; - closure->setFlatClosureData(data); + closure->setExtendedSlot(JSFunction::FLAT_CLOSURE_UPVARS_SLOT, PrivateValue(data)); return closure; } -JSObject * +JSFunction * js_NewFlatClosure(JSContext *cx, JSFunction *fun, JSOp op, size_t oplen) { /* @@ -2288,23 +2297,22 @@ js_NewFlatClosure(JSContext *cx, JSFunction *fun, JSOp op, size_t oplen) VOUCH_DOES_NOT_REQUIRE_STACK(); JSObject *scopeChain = &cx->fp()->scopeChain(); - JSObject *closure = js_AllocFlatClosure(cx, fun, scopeChain); + JSFunction *closure = js_AllocFlatClosure(cx, fun, scopeChain); if (!closure || !fun->script()->bindings.hasUpvars()) return closure; - FlatClosureData *data = closure->getFlatClosureData(); uintN level = fun->script()->staticLevel; JSUpvarArray *uva = fun->script()->upvars(); for (uint32 i = 0, n = uva->length; i < n; i++) - data->upvars[i].init(GetUpvar(cx, level, uva->vector[i])); + closure->initFlatClosureUpvar(i, GetUpvar(cx, level, uva->vector[i])); return closure; } JSFunction * js_DefineFunction(JSContext *cx, JSObject *obj, jsid id, Native native, - uintN nargs, uintN attrs) + uintN nargs, uintN attrs, AllocKind kind) { PropertyOp gop; StrictPropertyOp sop; @@ -2325,58 +2333,14 @@ js_DefineFunction(JSContext *cx, JSObject *obj, jsid id, Native native, sop = NULL; } - /* - * Historically, all objects have had a parent member as intrinsic scope - * chain link. We want to move away from this universal parent, but JS - * requires that function objects have something like parent (ES3 and ES5 - * call it the [[Scope]] internal property), to bake a particular static - * scope environment into each function object. - * - * All function objects thus have parent, including all native functions. - * All native functions defined by the JS_DefineFunction* APIs are created - * via the call below to js_NewFunction, which passes obj as the parent - * parameter, and so binds fun's parent to obj using JSObject::setParent, - * under js_NewFunction (in JSObject::init, called from NewObject -- see - * jsobjinlines.h). - * - * But JSObject::setParent sets the DELEGATE object flag on its receiver, - * to mark the object as a proto or parent of another object. Such objects - * may intervene in property lookups and scope chain searches, so require - * special handling when caching lookup and search results (since such - * intervening objects can in general grow shadowing properties later). 
- * - * Thus using setParent prematurely flags certain objects, notably class - * prototypes, so that defining native methods on them, where the method's - * name (e.g., toString) is already bound on Object.prototype, triggers - * shadowingShapeChange events and gratuitous shape regeneration. - * - * To fix this longstanding bug, we set check whether obj is already a - * delegate, and if not, then if js_NewFunction flagged obj as a delegate, - * we clear the flag. - * - * We thus rely on the fact that native functions (including indirect eval) - * do not use the property cache or equivalent JIT techniques that require - * this bit to be set on their parent-linked scope chain objects. - * - * Note: we keep API compatibility by setting parent to obj for all native - * function objects, even if obj->getGlobal() would suffice. This should be - * revisited when parent is narrowed to exist only for function objects and - * possibly a few prehistoric scope objects (e.g. event targets). - * - * FIXME: bug 611190. - */ - bool wasDelegate = obj->isDelegate(); - fun = js_NewFunction(cx, NULL, native, nargs, attrs & (JSFUN_FLAGS_MASK), obj, - JSID_IS_ATOM(id) ? JSID_TO_ATOM(id) : NULL); + JSID_IS_ATOM(id) ? JSID_TO_ATOM(id) : NULL, + kind); if (!fun) return NULL; - if (!wasDelegate && obj->isDelegate()) - obj->clearDelegate(); - if (!obj->defineGeneric(cx, id, ObjectValue(*fun), gop, sop, attrs & ~JSFUN_FLAGS_MASK)) return NULL; @@ -2388,24 +2352,12 @@ JS_STATIC_ASSERT((JSV2F_CONSTRUCT & JSV2F_SEARCH_STACK) == 0); JSFunction * js_ValueToFunction(JSContext *cx, const Value *vp, uintN flags) { - JSObject *funobj; - if (!IsFunctionObject(*vp, &funobj)) { + JSFunction *fun; + if (!IsFunctionObject(*vp, &fun)) { js_ReportIsNotFunction(cx, vp, flags); return NULL; } - return funobj->getFunctionPrivate(); -} - -JSObject * -js_ValueToFunctionObject(JSContext *cx, Value *vp, uintN flags) -{ - JSObject *funobj; - if (!IsFunctionObject(*vp, &funobj)) { - js_ReportIsNotFunction(cx, vp, flags); - return NULL; - } - - return funobj; + return fun; } JSObject * diff --git a/js/src/jsfun.h b/js/src/jsfun.h index b10ac9e3bffa..5f061fa0aea9 100644 --- a/js/src/jsfun.h +++ b/js/src/jsfun.h @@ -94,16 +94,17 @@ global object */ #define JSFUN_EXPR_CLOSURE 0x1000 /* expression closure: function(x) x*x */ +#define JSFUN_EXTENDED 0x2000 /* structure is FunctionExtended */ #define JSFUN_INTERPRETED 0x4000 /* use u.i if kind >= this value else u.n */ #define JSFUN_FLAT_CLOSURE 0x8000 /* flat (aka "display") closure */ #define JSFUN_NULL_CLOSURE 0xc000 /* null closure entrains no scope chain */ #define JSFUN_KINDMASK 0xc000 /* encode interp vs. native and closure optimization level -- see above */ -struct JSFunction : public JSObject_Slots2 -{ - /* Functions always have two fixed slots (FUN_CLASS_RESERVED_SLOTS). */ +namespace js { class FunctionExtended; } +struct JSFunction : public JSObject +{ uint16 nargs; /* maximum number of specified arguments, reflected as f.length/f.arity */ uint16 flags; /* flags, see JSFUN_* below and in jsapi.h */ @@ -116,10 +117,7 @@ struct JSFunction : public JSObject_Slots2 struct Scripted { JSScript *script_; /* interpreted bytecode descriptor or null; use the setter! 
*/ - uint16 skipmin; /* net skip amount up (toward zero) from - script_->staticLevel to nearest upvar, - including upvars in nested functions */ - js::Shape *names; /* argument and variable names */ + JSObject *env; /* environment for new activations */ } i; void *nativeOrScript; } u; @@ -163,37 +161,17 @@ struct JSFunction : public JSObject_Slots2 return flags & JSFUN_JOINABLE; } - JSObject &compiledFunObj() { - return *this; - } - - private: /* - * FunctionClass reserves two slots, which are free in JSObject::fslots - * without requiring dslots allocation. Null closures that can be joined to - * a compiler-created function object use the first one to hold a mutable - * methodAtom() state variable, needed for correct foo.caller handling. + * For an interpreted function, accessors for the initial scope object of + * activations (stack frames) of the function. */ - enum { - METHOD_ATOM_SLOT = JSSLOT_FUN_METHOD_ATOM - }; + inline JSObject *environment() const; + inline void setEnvironment(JSObject *obj); + + static inline size_t offsetOfEnvironment() { return offsetof(JSFunction, u.i.env); } - public: inline void setJoinable(); - /* - * Method name imputed from property uniquely assigned to or initialized, - * where the function does not need to be cloned to carry a scope chain or - * flattened upvars. - */ - JSAtom *methodAtom() const { - return (joinable() && getSlot(METHOD_ATOM_SLOT).isString()) - ? &getSlot(METHOD_ATOM_SLOT).toString()->asAtom() - : NULL; - } - - inline void setMethodAtom(JSAtom *atom); - js::HeapPtrScript &script() const { JS_ASSERT(isInterpreted()); return *(js::HeapPtrScript *)&u.i.script_; @@ -221,10 +199,6 @@ struct JSFunction : public JSObject_Slots2 return offsetof(JSFunction, u.nativeOrScript); } - /* Number of extra fixed function object slots. */ - static const uint32 CLASS_RESERVED_SLOTS = JSObject::FUN_CLASS_RESERVED_SLOTS; - - js::Class *getConstructorClass() const { JS_ASSERT(isNative()); return u.n.clasp; @@ -234,228 +208,127 @@ struct JSFunction : public JSObject_Slots2 JS_ASSERT(isNative()); u.n.clasp = clasp; } + +#if JS_BITS_PER_WORD == 32 + static const js::gc::AllocKind FinalizeKind = js::gc::FINALIZE_OBJECT2; + static const js::gc::AllocKind ExtendedFinalizeKind = js::gc::FINALIZE_OBJECT4; +#else + static const js::gc::AllocKind FinalizeKind = js::gc::FINALIZE_OBJECT4; + static const js::gc::AllocKind ExtendedFinalizeKind = js::gc::FINALIZE_OBJECT8; +#endif + + inline void trace(JSTracer *trc); + + /* Bound function accessors. */ + + inline bool initBoundFunction(JSContext *cx, const js::Value &thisArg, + const js::Value *args, uintN argslen); + + inline JSObject *getBoundFunctionTarget() const; + inline const js::Value &getBoundFunctionThis() const; + inline const js::Value &getBoundFunctionArgument(uintN which) const; + inline size_t getBoundFunctionArgumentCount() const; + + private: + inline js::FunctionExtended *toExtended(); + inline const js::FunctionExtended *toExtended() const; + + inline bool isExtended() const { + JS_STATIC_ASSERT(FinalizeKind != ExtendedFinalizeKind); + JS_ASSERT(!!(flags & JSFUN_EXTENDED) == (getAllocKind() == ExtendedFinalizeKind)); + return !!(flags & JSFUN_EXTENDED); + } + + public: + /* Accessors for data stored in extended functions. 
 */
+
+    inline void initializeExtended();
+
+    inline void setExtendedSlot(size_t which, const js::Value &val);
+    inline const js::Value &getExtendedSlot(size_t which) const;
+
+    /*
+     * Flat closures with one or more upvars snapshot the upvars' values
+     * into a vector of js::Values referenced from here. This is a private
+     * pointer but is set only at creation and does not need to be barriered.
+     */
+    static const uint32 FLAT_CLOSURE_UPVARS_SLOT = 0;
+
+    static inline size_t getFlatClosureUpvarsOffset();
+
+    inline js::Value getFlatClosureUpvar(uint32 i) const;
+    inline void setFlatClosureUpvar(uint32 i, const js::Value &v);
+    inline void initFlatClosureUpvar(uint32 i, const js::Value &v);
+
+  private:
+    inline bool hasFlatClosureUpvars() const;
+    inline js::HeapValue *getFlatClosureUpvars() const;
+  public:
+
+    /* See comments in fun_finalize. */
+    inline void finalizeUpvars();
+
+    /* Slot holding associated method property, needed for foo.caller handling. */
+    static const uint32 METHOD_PROPERTY_SLOT = 0;
+
+    /* For cloned methods, slot holding the object this was cloned as a property from. */
+    static const uint32 METHOD_OBJECT_SLOT = 1;
+
+    /* Whether this is a function cloned from a method. */
+    inline bool isClonedMethod() const;
+
+    /* For a cloned method, pointer to the object the method was cloned for. */
+    inline JSObject *methodObj() const;
+    inline void setMethodObj(JSObject& obj);
+
+    /*
+     * Method name imputed from property uniquely assigned to or initialized,
+     * where the function does not need to be cloned to carry a scope chain or
+     * flattened upvars. This is set on both the original and cloned function.
+     */
+    inline JSAtom *methodAtom() const;
+    inline void setMethodAtom(JSAtom *atom);
 };
 
 inline JSFunction *
-JSObject::getFunctionPrivate() const
+JSObject::toFunction()
 {
-    JS_ASSERT(isFunction());
-    return reinterpret_cast<JSFunction *>(getPrivate());
+    JS_ASSERT(JS_ObjectIsFunction(NULL, this));
+    return static_cast<JSFunction *>(this);
 }
 
-namespace js {
-
-struct FlatClosureData {
-    HeapValue upvars[1];
-};
-
-static JS_ALWAYS_INLINE bool
-IsFunctionObject(const js::Value &v)
+inline const JSFunction *
+JSObject::toFunction() const
 {
-    return v.isObject() && v.toObject().isFunction();
+    JS_ASSERT(JS_ObjectIsFunction(NULL, const_cast<JSObject *>(this)));
+    return static_cast<const JSFunction *>(this);
 }
 
-static JS_ALWAYS_INLINE bool
-IsFunctionObject(const js::Value &v, JSObject **funobj)
-{
-    return v.isObject() && (*funobj = &v.toObject())->isFunction();
-}
-
-static JS_ALWAYS_INLINE bool
-IsFunctionObject(const js::Value &v, JSObject **funobj, JSFunction **fun)
-{
-    bool b = IsFunctionObject(v, funobj);
-    if (b)
-        *fun = (*funobj)->getFunctionPrivate();
-    return b;
-}
-
-static JS_ALWAYS_INLINE bool
-IsFunctionObject(const js::Value &v, JSFunction **fun)
-{
-    JSObject *funobj;
-    return IsFunctionObject(v, &funobj, fun);
-}
-
-static JS_ALWAYS_INLINE bool
-IsNativeFunction(const js::Value &v)
-{
-    JSFunction *fun;
-    return IsFunctionObject(v, &fun) && fun->isNative();
-}
-
-static JS_ALWAYS_INLINE bool
-IsNativeFunction(const js::Value &v, JSFunction **fun)
-{
-    return IsFunctionObject(v, fun) && (*fun)->isNative();
-}
-
-static JS_ALWAYS_INLINE bool
-IsNativeFunction(const js::Value &v, JSNative native)
-{
-    JSFunction *fun;
-    return IsFunctionObject(v, &fun) && fun->maybeNative() == native;
-}
-
-/*
- * When we have an object of a builtin class, we don't quite know what its
- * valueOf/toString methods are, since these methods may have been overwritten
- * or shadowed.
However, we can still do better than the general case by - * hard-coding the necessary properties for us to find the native we expect. - * - * TODO: a per-thread shape-based cache would be faster and simpler. - */ -static JS_ALWAYS_INLINE bool -ClassMethodIsNative(JSContext *cx, JSObject *obj, Class *clasp, jsid methodid, JSNative native) -{ - JS_ASSERT(obj->getClass() == clasp); - - Value v; - if (!HasDataProperty(cx, obj, methodid, &v)) { - JSObject *proto = obj->getProto(); - if (!proto || proto->getClass() != clasp || !HasDataProperty(cx, proto, methodid, &v)) - return false; - } - - return js::IsNativeFunction(v, native); -} - -extern JS_ALWAYS_INLINE bool -SameTraceType(const Value &lhs, const Value &rhs) -{ - return SameType(lhs, rhs) && - (lhs.isPrimitive() || - lhs.toObject().isFunction() == rhs.toObject().isFunction()); -} - -/* - * Return true if this is a compiler-created internal function accessed by - * its own object. Such a function object must not be accessible to script - * or embedding code. - */ -inline bool -IsInternalFunctionObject(JSObject *funobj) -{ - JS_ASSERT(funobj->isFunction()); - JSFunction *fun = (JSFunction *) funobj->getPrivate(); - return funobj == fun && (fun->flags & JSFUN_LAMBDA) && !funobj->getParent(); -} - -/* Valueified JS_IsConstructing. */ -static JS_ALWAYS_INLINE bool -IsConstructing(const Value *vp) -{ -#ifdef DEBUG - JSObject *callee = &JS_CALLEE(cx, vp).toObject(); - if (callee->isFunction()) { - JSFunction *fun = callee->getFunctionPrivate(); - JS_ASSERT((fun->flags & JSFUN_CONSTRUCTOR) != 0); - } else { - JS_ASSERT(callee->getClass()->construct != NULL); - } -#endif - return vp[1].isMagic(); -} - -inline bool -IsConstructing(CallReceiver call); - -static JS_ALWAYS_INLINE bool -IsConstructing_PossiblyWithGivenThisObject(const Value *vp, JSObject **ctorThis) -{ -#ifdef DEBUG - JSObject *callee = &JS_CALLEE(cx, vp).toObject(); - if (callee->isFunction()) { - JSFunction *fun = callee->getFunctionPrivate(); - JS_ASSERT((fun->flags & JSFUN_CONSTRUCTOR) != 0); - } else { - JS_ASSERT(callee->getClass()->construct != NULL); - } -#endif - bool isCtor = vp[1].isMagic(); - if (isCtor) - *ctorThis = vp[1].getMagicObjectOrNullPayload(); - return isCtor; -} - -inline const char * -GetFunctionNameBytes(JSContext *cx, JSFunction *fun, JSAutoByteString *bytes) -{ - if (fun->atom) - return bytes->encode(cx, fun->atom); - return js_anonymous_str; -} - -extern JSFunctionSpec function_methods[]; - -extern JSBool -Function(JSContext *cx, uintN argc, Value *vp); - -extern bool -IsBuiltinFunctionConstructor(JSFunction *fun); - -/* - * Preconditions: funobj->isInterpreted() && !funobj->isFunctionPrototype() && - * !funobj->isBoundFunction(). This is sufficient to establish that funobj has - * a non-configurable non-method .prototype data property, thought it might not - * have been resolved yet, and its value could be anything. - * - * Return the shape of the .prototype property of funobj, resolving it if - * needed. On error, return NULL. - * - * This is not safe to call on trace because it defines properties, which can - * trigger lookups that could reenter. 
- */ -const Shape * -LookupInterpretedFunctionPrototype(JSContext *cx, JSObject *funobj); - -} /* namespace js */ - extern JSString * fun_toStringHelper(JSContext *cx, JSObject *obj, uintN indent); extern JSFunction * js_NewFunction(JSContext *cx, JSObject *funobj, JSNative native, uintN nargs, - uintN flags, JSObject *parent, JSAtom *atom); + uintN flags, JSObject *parent, JSAtom *atom, + js::gc::AllocKind kind = JSFunction::FinalizeKind); extern void js_FinalizeFunction(JSContext *cx, JSFunction *fun); -extern JSObject * JS_FASTCALL -js_CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent, - JSObject *proto); +extern JSFunction * JS_FASTCALL +js_CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent, JSObject *proto, + js::gc::AllocKind kind = JSFunction::FinalizeKind); -inline JSObject * -CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent, - bool ignoreSingletonClone = false); - -inline JSObject * -CloneFunctionObject(JSContext *cx, JSFunction *fun) -{ - /* - * Variant which makes an exact clone of fun, preserving parent and proto. - * Calling the above version CloneFunctionObject(cx, fun, fun->getParent()) - * is not equivalent: API clients, including XPConnect, can reparent - * objects so that fun->getGlobal() != fun->getProto()->getGlobal(). - * See ReparentWrapperIfFound. - */ - JS_ASSERT(fun->getParent() && fun->getProto()); - - if (fun->hasSingletonType()) - return fun; - - return js_CloneFunctionObject(cx, fun, fun->getParent(), fun->getProto()); -} - -extern JSObject * JS_FASTCALL +extern JSFunction * JS_FASTCALL js_AllocFlatClosure(JSContext *cx, JSFunction *fun, JSObject *scopeChain); -extern JSObject * +extern JSFunction * js_NewFlatClosure(JSContext *cx, JSFunction *fun, JSOp op, size_t oplen); extern JSFunction * js_DefineFunction(JSContext *cx, JSObject *obj, jsid id, JSNative native, - uintN nargs, uintN flags); + uintN nargs, uintN flags, + js::gc::AllocKind kind = JSFunction::FinalizeKind); /* * Flags for js_ValueToFunction and js_ReportIsNotFunction. @@ -466,9 +339,6 @@ js_DefineFunction(JSContext *cx, JSObject *obj, jsid id, JSNative native, extern JSFunction * js_ValueToFunction(JSContext *cx, const js::Value *vp, uintN flags); -extern JSObject * -js_ValueToFunctionObject(JSContext *cx, js::Value *vp, uintN flags); - extern JSObject * js_ValueToCallableObject(JSContext *cx, js::Value *vp, uintN flags); @@ -511,8 +381,35 @@ SetCallVar(JSContext *cx, JSObject *obj, jsid id, JSBool strict, js::Value *vp); extern JSBool SetCallUpvar(JSContext *cx, JSObject *obj, jsid id, JSBool strict, js::Value *vp); +/* + * Function extended with reserved slots for use by various kinds of functions. + * Most functions do not have these extensions, but enough are that efficient + * storage is required (no malloc'ed reserved slots). + */ +class FunctionExtended : public JSFunction +{ + friend struct JSFunction; + + /* Reserved slots available for storage by particular native functions. 
*/ + HeapValue extendedSlots[2]; +}; + } // namespace js +inline js::FunctionExtended * +JSFunction::toExtended() +{ + JS_ASSERT(isExtended()); + return static_cast(this); +} + +inline const js::FunctionExtended * +JSFunction::toExtended() const +{ + JS_ASSERT(isExtended()); + return static_cast(this); +} + extern JSBool js_GetArgsValue(JSContext *cx, js::StackFrame *fp, js::Value *vp); diff --git a/js/src/jsfuninlines.h b/js/src/jsfuninlines.h index 80c94ca04427..cd21a6496848 100644 --- a/js/src/jsfuninlines.h +++ b/js/src/jsfuninlines.h @@ -45,58 +45,363 @@ #include "vm/GlobalObject.h" -inline bool -js::IsConstructing(CallReceiver call) -{ - return IsConstructing(call.base()); -} - inline bool JSFunction::inStrictMode() const { return script()->strictModeCode; } +inline JSObject * +JSFunction::environment() const +{ + JS_ASSERT(isInterpreted()); + return u.i.env; +} + +inline void +JSFunction::setEnvironment(JSObject *obj) +{ + JS_ASSERT(isInterpreted()); + u.i.env = obj; +} + +inline void +JSFunction::initializeExtended() +{ + JS_ASSERT(isExtended()); + + JS_ASSERT(js::ArrayLength(toExtended()->extendedSlots) == 2); + toExtended()->extendedSlots[0].init(js::UndefinedValue()); + toExtended()->extendedSlots[1].init(js::UndefinedValue()); +} + inline void JSFunction::setJoinable() { JS_ASSERT(isInterpreted()); - setSlot(METHOD_ATOM_SLOT, js::NullValue()); flags |= JSFUN_JOINABLE; } +inline bool +JSFunction::isClonedMethod() const +{ + return joinable() && isExtended() && getExtendedSlot(METHOD_OBJECT_SLOT).isObject(); +} + +inline JSAtom * +JSFunction::methodAtom() const +{ + return (joinable() && isExtended() && getExtendedSlot(METHOD_PROPERTY_SLOT).isString()) + ? (JSAtom *) getExtendedSlot(METHOD_PROPERTY_SLOT).toString() + : NULL; +} + inline void JSFunction::setMethodAtom(JSAtom *atom) { JS_ASSERT(joinable()); - setSlot(METHOD_ATOM_SLOT, js::StringValue(atom)); + setExtendedSlot(METHOD_PROPERTY_SLOT, js::StringValue(atom)); } inline JSObject * +JSFunction::methodObj() const +{ + JS_ASSERT(joinable()); + return isClonedMethod() ? &getExtendedSlot(METHOD_OBJECT_SLOT).toObject() : NULL; +} + +inline void +JSFunction::setMethodObj(JSObject& obj) +{ + JS_ASSERT(joinable()); + setExtendedSlot(METHOD_OBJECT_SLOT, js::ObjectValue(obj)); +} + +inline void +JSFunction::setExtendedSlot(size_t which, const js::Value &val) +{ + JS_ASSERT(which < js::ArrayLength(toExtended()->extendedSlots)); + toExtended()->extendedSlots[which] = val; +} + +inline const js::Value & +JSFunction::getExtendedSlot(size_t which) const +{ + JS_ASSERT(which < js::ArrayLength(toExtended()->extendedSlots)); + return toExtended()->extendedSlots[which]; +} + +inline bool +JSFunction::hasFlatClosureUpvars() const +{ + JS_ASSERT(isFlatClosure()); + return isExtended() && !getExtendedSlot(FLAT_CLOSURE_UPVARS_SLOT).isUndefined(); +} + +inline js::HeapValue * +JSFunction::getFlatClosureUpvars() const +{ + JS_ASSERT(hasFlatClosureUpvars()); + return (js::HeapValue *) getExtendedSlot(FLAT_CLOSURE_UPVARS_SLOT).toPrivate(); +} + +inline void +JSFunction::finalizeUpvars() +{ + /* + * Cloned function objects may be flat closures with upvars to free. + * + * We must not access JSScript here that is stored in JSFunction. The + * script can be finalized before the function or closure instances. So we + * just check if JSSLOT_FLAT_CLOSURE_UPVARS holds a private value encoded + * as a double. We must also ignore newborn closures that do not have the + * private pointer set. 
+ * + * FIXME bug 648320 - allocate upvars on the GC heap to avoid doing it + * here explicitly. + */ + if (hasFlatClosureUpvars()) { + js::HeapValue *upvars = getFlatClosureUpvars(); + js::Foreground::free_(upvars); + } +} + +inline js::Value +JSFunction::getFlatClosureUpvar(uint32 i) const +{ + JS_ASSERT(hasFlatClosureUpvars()); + JS_ASSERT(script()->bindings.countUpvars() == script()->upvars()->length); + JS_ASSERT(i < script()->bindings.countUpvars()); + return getFlatClosureUpvars()[i]; +} + +inline void +JSFunction::setFlatClosureUpvar(uint32 i, const js::Value &v) +{ + JS_ASSERT(isFlatClosure()); + JS_ASSERT(script()->bindings.countUpvars() == script()->upvars()->length); + JS_ASSERT(i < script()->bindings.countUpvars()); + getFlatClosureUpvars()[i] = v; +} + +inline void +JSFunction::initFlatClosureUpvar(uint32 i, const js::Value &v) +{ + JS_ASSERT(isFlatClosure()); + JS_ASSERT(script()->bindings.countUpvars() == script()->upvars()->length); + JS_ASSERT(i < script()->bindings.countUpvars()); + getFlatClosureUpvars()[i].init(v); +} + +/* static */ inline size_t +JSFunction::getFlatClosureUpvarsOffset() +{ + return offsetof(js::FunctionExtended, extendedSlots[FLAT_CLOSURE_UPVARS_SLOT]); +} + +namespace js { + +static JS_ALWAYS_INLINE bool +IsFunctionObject(const js::Value &v) +{ + return v.isObject() && v.toObject().isFunction(); +} + +static JS_ALWAYS_INLINE bool +IsFunctionObject(const js::Value &v, JSFunction **fun) +{ + if (v.isObject() && v.toObject().isFunction()) { + *fun = v.toObject().toFunction(); + return true; + } + return false; +} + +static JS_ALWAYS_INLINE bool +IsNativeFunction(const js::Value &v) +{ + JSFunction *fun; + return IsFunctionObject(v, &fun) && fun->isNative(); +} + +static JS_ALWAYS_INLINE bool +IsNativeFunction(const js::Value &v, JSFunction **fun) +{ + return IsFunctionObject(v, fun) && (*fun)->isNative(); +} + +static JS_ALWAYS_INLINE bool +IsNativeFunction(const js::Value &v, JSNative native) +{ + JSFunction *fun; + return IsFunctionObject(v, &fun) && fun->maybeNative() == native; +} + +/* + * When we have an object of a builtin class, we don't quite know what its + * valueOf/toString methods are, since these methods may have been overwritten + * or shadowed. However, we can still do better than the general case by + * hard-coding the necessary properties for us to find the native we expect. + * + * TODO: a per-thread shape-based cache would be faster and simpler. + */ +static JS_ALWAYS_INLINE bool +ClassMethodIsNative(JSContext *cx, JSObject *obj, Class *clasp, jsid methodid, JSNative native) +{ + JS_ASSERT(obj->getClass() == clasp); + + Value v; + if (!HasDataProperty(cx, obj, methodid, &v)) { + JSObject *proto = obj->getProto(); + if (!proto || proto->getClass() != clasp || !HasDataProperty(cx, proto, methodid, &v)) + return false; + } + + return js::IsNativeFunction(v, native); +} + +extern JS_ALWAYS_INLINE bool +SameTraceType(const Value &lhs, const Value &rhs) +{ + return SameType(lhs, rhs) && + (lhs.isPrimitive() || + lhs.toObject().isFunction() == rhs.toObject().isFunction()); +} + +/* Valueified JS_IsConstructing. 
*/ +static JS_ALWAYS_INLINE bool +IsConstructing(const Value *vp) +{ +#ifdef DEBUG + JSObject *callee = &JS_CALLEE(cx, vp).toObject(); + if (callee->isFunction()) { + JSFunction *fun = callee->toFunction(); + JS_ASSERT((fun->flags & JSFUN_CONSTRUCTOR) != 0); + } else { + JS_ASSERT(callee->getClass()->construct != NULL); + } +#endif + return vp[1].isMagic(); +} + +inline bool +IsConstructing(CallReceiver call) +{ + return IsConstructing(call.base()); +} + +static JS_ALWAYS_INLINE bool +IsConstructing_PossiblyWithGivenThisObject(const Value *vp, JSObject **ctorThis) +{ +#ifdef DEBUG + JSObject *callee = &JS_CALLEE(cx, vp).toObject(); + if (callee->isFunction()) { + JSFunction *fun = callee->toFunction(); + JS_ASSERT((fun->flags & JSFUN_CONSTRUCTOR) != 0); + } else { + JS_ASSERT(callee->getClass()->construct != NULL); + } +#endif + bool isCtor = vp[1].isMagic(); + if (isCtor) + *ctorThis = vp[1].getMagicObjectOrNullPayload(); + return isCtor; +} + +inline const char * +GetFunctionNameBytes(JSContext *cx, JSFunction *fun, JSAutoByteString *bytes) +{ + if (fun->atom) + return bytes->encode(cx, fun->atom); + return js_anonymous_str; +} + +extern JSFunctionSpec function_methods[]; + +extern JSBool +Function(JSContext *cx, uintN argc, Value *vp); + +extern bool +IsBuiltinFunctionConstructor(JSFunction *fun); + +/* + * Preconditions: funobj->isInterpreted() && !funobj->isFunctionPrototype() && + * !funobj->isBoundFunction(). This is sufficient to establish that funobj has + * a non-configurable non-method .prototype data property, thought it might not + * have been resolved yet, and its value could be anything. + * + * Return the shape of the .prototype property of funobj, resolving it if + * needed. On error, return NULL. + * + * This is not safe to call on trace because it defines properties, which can + * trigger lookups that could reenter. + */ +const Shape * +LookupInterpretedFunctionPrototype(JSContext *cx, JSObject *funobj); + +static inline JSObject * +SkipScopeParent(JSObject *parent) +{ + if (!parent) + return NULL; + while (parent->isInternalScope()) + parent = parent->scopeChain(); + return parent; +} + +inline JSFunction * CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent, - bool ignoreSingletonClone /* = false */) + gc::AllocKind kind = JSFunction::FinalizeKind) { JS_ASSERT(parent); JSObject *proto = parent->getGlobal()->getOrCreateFunctionPrototype(cx); if (!proto) return NULL; + return js_CloneFunctionObject(cx, fun, parent, proto, kind); +} + +inline JSFunction * +CloneFunctionObjectIfNotSingleton(JSContext *cx, JSFunction *fun, JSObject *parent) +{ /* * For attempts to clone functions at a function definition opcode or from * a method barrier, don't perform the clone if the function has singleton - * type. CloneFunctionObject was called pessimistically, and we need to - * preserve the type's property that if it is singleton there is only a - * single object with its type in existence. + * type. This was called pessimistically, and we need to preserve the + * type's property that if it is singleton there is only a single object + * with its type in existence. 
*/ - if (ignoreSingletonClone && fun->hasSingletonType()) { - JS_ASSERT(fun->getProto() == proto); - fun->setParent(parent); + if (fun->hasSingletonType()) { + if (!fun->setParent(cx, SkipScopeParent(parent))) + return NULL; + fun->setEnvironment(parent); return fun; } - return js_CloneFunctionObject(cx, fun, parent, proto); + return CloneFunctionObject(cx, fun, parent); } +inline JSFunction * +CloneFunctionObject(JSContext *cx, JSFunction *fun) +{ + /* + * Variant which makes an exact clone of fun, preserving parent and proto. + * Calling the above version CloneFunctionObject(cx, fun, fun->getParent()) + * is not equivalent: API clients, including XPConnect, can reparent + * objects so that fun->getGlobal() != fun->getProto()->getGlobal(). + * See ReparentWrapperIfFound. + */ + JS_ASSERT(fun->getParent() && fun->getProto()); + + if (fun->hasSingletonType()) + return fun; + + return js_CloneFunctionObject(cx, fun, fun->environment(), fun->getProto(), + JSFunction::ExtendedFinalizeKind); +} + +} /* namespace js */ + inline void JSFunction::setScript(JSScript *script_) { diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 7d97bc972d4b..7cc0e497c768 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -143,9 +143,9 @@ const uint32 Arena::ThingSizes[] = { sizeof(JSObject_Slots12), /* FINALIZE_OBJECT12_BACKGROUND */ sizeof(JSObject_Slots16), /* FINALIZE_OBJECT16 */ sizeof(JSObject_Slots16), /* FINALIZE_OBJECT16_BACKGROUND */ - sizeof(JSFunction), /* FINALIZE_FUNCTION */ sizeof(JSScript), /* FINALIZE_SCRIPT */ sizeof(Shape), /* FINALIZE_SHAPE */ + sizeof(BaseShape), /* FINALIZE_BASE_SHAPE */ sizeof(types::TypeObject), /* FINALIZE_TYPE_OBJECT */ #if JS_HAS_XML_SUPPORT sizeof(JSXML), /* FINALIZE_XML */ @@ -170,9 +170,9 @@ const uint32 Arena::FirstThingOffsets[] = { OFFSET(JSObject_Slots12), /* FINALIZE_OBJECT12_BACKGROUND */ OFFSET(JSObject_Slots16), /* FINALIZE_OBJECT16 */ OFFSET(JSObject_Slots16), /* FINALIZE_OBJECT16_BACKGROUND */ - OFFSET(JSFunction), /* FINALIZE_FUNCTION */ OFFSET(JSScript), /* FINALIZE_SCRIPT */ OFFSET(Shape), /* FINALIZE_SHAPE */ + OFFSET(BaseShape), /* FINALIZE_BASE_SHAPE */ OFFSET(types::TypeObject), /* FINALIZE_TYPE_OBJECT */ #if JS_HAS_XML_SUPPORT OFFSET(JSXML), /* FINALIZE_XML */ @@ -258,7 +258,7 @@ Arena::staticAsserts() template inline bool -Arena::finalize(JSContext *cx, AllocKind thingKind, size_t thingSize) +Arena::finalize(JSContext *cx, AllocKind thingKind, size_t thingSize, bool background) { /* Enforce requirements on size of T. 
 */
    JS_ASSERT(thingSize % Cell::CellSize == 0);
@@ -307,7 +307,7 @@ Arena::finalize(JSContext *cx, AllocKind thingKind, size_t thingSize)
         } else {
             if (!newFreeSpanStart)
                 newFreeSpanStart = thing;
-            t->finalize(cx);
+            t->finalize(cx, background);
             JS_POISON(t, JS_FREE_PATTERN, thingSize);
         }
     }
@@ -342,7 +342,7 @@ Arena::finalize(JSContext *cx, AllocKind thingKind, size_t thingSize)
 
 template <typename T>
 inline void
-FinalizeTypedArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind)
+FinalizeTypedArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind, bool background)
 {
     /*
      * Release empty arenas and move non-full arenas with some free things into
@@ -354,7 +354,7 @@ FinalizeTypedArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKin
     ArenaHeader **ap = &al->head;
     size_t thingSize = Arena::thingSize(thingKind);
     while (ArenaHeader *aheader = *ap) {
-        bool allClear = aheader->getArena()->finalize<T>(cx, thingKind, thingSize);
+        bool allClear = aheader->getArena()->finalize<T>(cx, thingKind, thingSize, background);
         if (allClear) {
             *ap = aheader->next;
             aheader->chunk()->releaseArena(aheader);
@@ -379,7 +379,7 @@ FinalizeTypedArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKin
  * after the al->head.
  */
 static void
-FinalizeArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind)
+FinalizeArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind, bool background)
 {
     switch(thingKind) {
       case FINALIZE_OBJECT0:
@@ -394,31 +394,33 @@ FinalizeArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind)
       case FINALIZE_OBJECT12_BACKGROUND:
       case FINALIZE_OBJECT16:
       case FINALIZE_OBJECT16_BACKGROUND:
-      case FINALIZE_FUNCTION:
-        FinalizeTypedArenas<JSObject>(cx, al, thingKind);
+        FinalizeTypedArenas<JSObject>(cx, al, thingKind, background);
         break;
       case FINALIZE_SCRIPT:
-        FinalizeTypedArenas<JSScript>(cx, al, thingKind);
+        FinalizeTypedArenas<JSScript>(cx, al, thingKind, background);
         break;
       case FINALIZE_SHAPE:
-        FinalizeTypedArenas<Shape>(cx, al, thingKind);
+        FinalizeTypedArenas<Shape>(cx, al, thingKind, background);
+        break;
+      case FINALIZE_BASE_SHAPE:
+        FinalizeTypedArenas<BaseShape>(cx, al, thingKind, background);
         break;
       case FINALIZE_TYPE_OBJECT:
-        FinalizeTypedArenas<types::TypeObject>(cx, al, thingKind);
+        FinalizeTypedArenas<types::TypeObject>(cx, al, thingKind, background);
         break;
 #if JS_HAS_XML_SUPPORT
       case FINALIZE_XML:
-        FinalizeTypedArenas<JSXML>(cx, al, thingKind);
+        FinalizeTypedArenas<JSXML>(cx, al, thingKind, background);
         break;
 #endif
       case FINALIZE_STRING:
-        FinalizeTypedArenas<JSString>(cx, al, thingKind);
+        FinalizeTypedArenas<JSString>(cx, al, thingKind, background);
         break;
       case FINALIZE_SHORT_STRING:
-        FinalizeTypedArenas<JSShortString>(cx, al, thingKind);
+        FinalizeTypedArenas<JSShortString>(cx, al, thingKind, background);
         break;
       case FINALIZE_EXTERNAL_STRING:
-        FinalizeTypedArenas<JSExternalString>(cx, al, thingKind);
+        FinalizeTypedArenas<JSExternalString>(cx, al, thingKind, background);
         break;
     }
 }
@@ -1458,7 +1460,7 @@ ArenaLists::finalizeNow(JSContext *cx, AllocKind thingKind)
 #ifdef JS_THREADSAFE
     JS_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE);
 #endif
-    FinalizeArenas(cx, &arenaLists[thingKind], thingKind);
+    FinalizeArenas(cx, &arenaLists[thingKind], thingKind, false);
 }
 
 inline void
@@ -1470,7 +1472,6 @@ ArenaLists::finalizeLater(JSContext *cx, AllocKind thingKind)
               thingKind == FINALIZE_OBJECT8_BACKGROUND ||
               thingKind == FINALIZE_OBJECT12_BACKGROUND ||
               thingKind == FINALIZE_OBJECT16_BACKGROUND ||
-              thingKind == FINALIZE_FUNCTION ||
               thingKind == FINALIZE_SHORT_STRING ||
               thingKind == FINALIZE_STRING);
 
@@ -1501,7 +1502,7 @@ ArenaLists::finalizeLater(JSContext *cx, AllocKind thingKind)
         al->clear();
backgroundFinalizeState[thingKind] = BFS_RUN; } else { - FinalizeArenas(cx, al, thingKind); + FinalizeArenas(cx, al, thingKind, false); backgroundFinalizeState[thingKind] = BFS_DONE; } @@ -1521,7 +1522,7 @@ ArenaLists::backgroundFinalize(JSContext *cx, ArenaHeader *listHead) JSCompartment *comp = listHead->compartment; ArenaList finalized; finalized.head = listHead; - FinalizeArenas(cx, &finalized, thingKind); + FinalizeArenas(cx, &finalized, thingKind, true); /* * After we finish the finalization al->cursor must point to the end of @@ -1574,13 +1575,6 @@ ArenaLists::finalizeObjects(JSContext *cx) finalizeLater(cx, FINALIZE_OBJECT16_BACKGROUND); #endif - /* - * We must finalize Function instances after finalizing any other objects - * even if we use the background finalization for the latter. See comments - * in JSObject::finalizeUpvarsIfFlatClosure. - */ - finalizeLater(cx, FINALIZE_FUNCTION); - #if JS_HAS_XML_SUPPORT finalizeNow(cx, FINALIZE_XML); #endif @@ -1599,6 +1593,7 @@ void ArenaLists::finalizeShapes(JSContext *cx) { finalizeNow(cx, FINALIZE_SHAPE); + finalizeNow(cx, FINALIZE_BASE_SHAPE); finalizeNow(cx, FINALIZE_TYPE_OBJECT); } @@ -2481,18 +2476,6 @@ BeginMarkPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind) { JSRuntime *rt = cx->runtime; - /* - * Reset the property cache's type id generator so we can compress ids. - * Same for the protoHazardShape proxy-shape standing in for all object - * prototypes having readonly or setter properties. - */ - if (rt->shapeGen & SHAPE_OVERFLOW_BIT || (rt->gcZeal() && !rt->gcCurrentCompartment)) { - JS_ASSERT(!rt->gcCurrentCompartment); - rt->gcRegenShapes = true; - rt->shapeGen = 0; - rt->protoHazardShape = 0; - } - for (GCCompartmentsIter c(rt); !c.done(); c.next()) c->purge(cx); @@ -2942,7 +2925,6 @@ GCCycle(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind) #endif rt->gcMarkAndSweep = false; - rt->gcRegenShapes = false; rt->setGCLastBytes(rt->gcBytes, gckind); rt->gcCurrentCompartment = NULL; diff --git a/js/src/jsgc.h b/js/src/jsgc.h index 190cf0cd8a85..32fa9a31ee47 100644 --- a/js/src/jsgc.h +++ b/js/src/jsgc.h @@ -559,7 +559,7 @@ struct Arena { } template - bool finalize(JSContext *cx, AllocKind thingKind, size_t thingSize); + bool finalize(JSContext *cx, AllocKind thingKind, size_t thingSize, bool background); }; /* The chunk header (located at the end of the chunk to preserve arena alignment). */ @@ -952,9 +952,9 @@ MapAllocToTraceKind(AllocKind thingKind) JSTRACE_OBJECT, /* FINALIZE_OBJECT12_BACKGROUND */ JSTRACE_OBJECT, /* FINALIZE_OBJECT16 */ JSTRACE_OBJECT, /* FINALIZE_OBJECT16_BACKGROUND */ - JSTRACE_OBJECT, /* FINALIZE_FUNCTION */ JSTRACE_SCRIPT, /* FINALIZE_SCRIPT */ JSTRACE_SHAPE, /* FINALIZE_SHAPE */ + JSTRACE_BASE_SHAPE, /* FINALIZE_BASE_SHAPE */ JSTRACE_TYPE_OBJECT,/* FINALIZE_TYPE_OBJECT */ #if JS_HAS_XML_SUPPORT /* FINALIZE_XML */ JSTRACE_XML, diff --git a/js/src/jsgcinlines.h b/js/src/jsgcinlines.h index 5fe0bb2e3dcc..c99fe42416e4 100644 --- a/js/src/jsgcinlines.h +++ b/js/src/jsgcinlines.h @@ -68,22 +68,44 @@ const size_t SLOTS_TO_THING_KIND_LIMIT = 17; /* Get the best kind to use when making an object with the given slot count. */ static inline AllocKind -GetGCObjectKind(size_t numSlots, bool isArray = false) +GetGCObjectKind(size_t numSlots) { extern AllocKind slotsToThingKind[]; - if (numSlots >= SLOTS_TO_THING_KIND_LIMIT) { - /* - * If the object will definitely want more than the maximum number of - * fixed slots, use zero fixed slots for arrays and the maximum for - * other objects. 
Arrays do not use their fixed slots anymore when - * they have a slots array, while other objects will continue to do so. - */ - return isArray ? FINALIZE_OBJECT0 : FINALIZE_OBJECT16; - } + if (numSlots >= SLOTS_TO_THING_KIND_LIMIT) + return FINALIZE_OBJECT16; return slotsToThingKind[numSlots]; } +static inline AllocKind +GetGCObjectKind(Class *clasp) +{ + if (clasp == &FunctionClass) + return JSFunction::FinalizeKind; + uint32 nslots = JSCLASS_RESERVED_SLOTS(clasp); + if (clasp->flags & JSCLASS_HAS_PRIVATE) + nslots++; + return GetGCObjectKind(nslots); +} + +/* As for GetGCObjectKind, but for dense array allocation. */ +static inline AllocKind +GetGCArrayKind(size_t numSlots) +{ + extern AllocKind slotsToThingKind[]; + + /* + * Dense arrays can use their fixed slots to hold their elements array + * (less two Values worth of ObjectElements header), but if more than the + * maximum number of fixed slots is needed then the fixed slots will be + * unused. + */ + JS_STATIC_ASSERT(ObjectElements::VALUES_PER_HEADER == 2); + if (numSlots > JSObject::NELEMENTS_LIMIT || numSlots + 2 >= SLOTS_TO_THING_KIND_LIMIT) + return FINALIZE_OBJECT2; + return slotsToThingKind[numSlots + 2]; +} + static inline AllocKind GetGCObjectFixedSlotsKind(size_t numFixedSlots) { @@ -151,6 +173,27 @@ GetGCKindSlots(AllocKind thingKind) } } +static inline size_t +GetGCKindSlots(AllocKind thingKind, Class *clasp) +{ + size_t nslots = GetGCKindSlots(thingKind); + + /* An object's private data uses the space taken by its last fixed slot. */ + if (clasp->flags & JSCLASS_HAS_PRIVATE) { + JS_ASSERT(nslots > 0); + nslots--; + } + + /* + * Functions have a larger finalize kind than FINALIZE_OBJECT to reserve + * space for the extra fields in JSFunction, but have no fixed slots. + */ + if (clasp == &FunctionClass) + nslots = 0; + + return nslots; +} + static inline void GCPoke(JSContext *cx, Value oldval) { @@ -324,9 +367,6 @@ class CellIter: public CellIterImpl inline void EmptyArenaOp(Arena *arena) {} inline void EmptyCellOp(Cell *t) {} -} /* namespace gc */ -} /* namespace js */ - /* * Allocates a new GC thing. After a successful allocation the caller must * fully initialize the thing before calling any function that can potentially @@ -358,54 +398,80 @@ NewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize) return static_cast(t); } +/* Alternate form which allocates a GC thing if doing so cannot trigger a GC. 
 */
+template <typename T>
+inline T *
+TryNewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
+{
+    JS_ASSERT(thingSize == js::gc::Arena::thingSize(kind));
+#ifdef JS_THREADSAFE
+    JS_ASSERT_IF((cx->compartment == cx->runtime->atomsCompartment),
+                 kind == js::gc::FINALIZE_STRING || kind == js::gc::FINALIZE_SHORT_STRING);
+#endif
+    JS_ASSERT(!cx->runtime->gcRunning);
+    JS_ASSERT(!JS_THREAD_DATA(cx)->noGCOrAllocationCheck);
+
+#ifdef JS_GC_ZEAL
+    if (cx->runtime->needZealousGC())
+        return NULL;
+#endif
+
+    void *t = cx->compartment->arenas.allocateFromFreeList(kind, thingSize);
+    return static_cast<T *>(t);
+}
+
+} /* namespace gc */
+} /* namespace js */
+
 inline JSObject *
 js_NewGCObject(JSContext *cx, js::gc::AllocKind kind)
 {
     JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
-    JSObject *obj = NewGCThing<JSObject>(cx, kind, js::gc::Arena::thingSize(kind));
-    if (obj)
-        obj->earlyInit(js::gc::GetGCKindSlots(kind));
-    return obj;
+    return js::gc::NewGCThing<JSObject>(cx, kind, js::gc::Arena::thingSize(kind));
+}
+
+inline JSObject *
+js_TryNewGCObject(JSContext *cx, js::gc::AllocKind kind)
+{
+    JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
+    return js::gc::TryNewGCThing<JSObject>(cx, kind, js::gc::Arena::thingSize(kind));
 }
 
 inline JSString *
 js_NewGCString(JSContext *cx)
 {
-    return NewGCThing<JSString>(cx, js::gc::FINALIZE_STRING, sizeof(JSString));
+    return js::gc::NewGCThing<JSString>(cx, js::gc::FINALIZE_STRING, sizeof(JSString));
 }
 
 inline JSShortString *
 js_NewGCShortString(JSContext *cx)
 {
-    return NewGCThing<JSShortString>(cx, js::gc::FINALIZE_SHORT_STRING, sizeof(JSShortString));
+    return js::gc::NewGCThing<JSShortString>(cx, js::gc::FINALIZE_SHORT_STRING, sizeof(JSShortString));
 }
 
 inline JSExternalString *
 js_NewGCExternalString(JSContext *cx)
 {
-    return NewGCThing<JSExternalString>(cx, js::gc::FINALIZE_EXTERNAL_STRING,
-                                        sizeof(JSExternalString));
-}
-
-inline JSFunction*
-js_NewGCFunction(JSContext *cx)
-{
-    JSFunction *fun = NewGCThing<JSFunction>(cx, js::gc::FINALIZE_FUNCTION, sizeof(JSFunction));
-    if (fun)
-        fun->earlyInit(JSObject::FUN_CLASS_RESERVED_SLOTS);
-    return fun;
+    return js::gc::NewGCThing<JSExternalString>(cx, js::gc::FINALIZE_EXTERNAL_STRING,
+                                                sizeof(JSExternalString));
 }
 
 inline JSScript *
 js_NewGCScript(JSContext *cx)
 {
-    return NewGCThing<JSScript>(cx, js::gc::FINALIZE_SCRIPT, sizeof(JSScript));
+    return js::gc::NewGCThing<JSScript>(cx, js::gc::FINALIZE_SCRIPT, sizeof(JSScript));
 }
 
 inline js::Shape *
 js_NewGCShape(JSContext *cx)
 {
-    return NewGCThing<js::Shape>(cx, js::gc::FINALIZE_SHAPE, sizeof(js::Shape));
+    return js::gc::NewGCThing<js::Shape>(cx, js::gc::FINALIZE_SHAPE, sizeof(js::Shape));
+}
+
+inline js::BaseShape *
+js_NewGCBaseShape(JSContext *cx)
+{
+    return js::gc::NewGCThing<js::BaseShape>(cx, js::gc::FINALIZE_BASE_SHAPE, sizeof(js::BaseShape));
 }
 
 #if JS_HAS_XML_SUPPORT
diff --git a/js/src/jsgcmark.cpp b/js/src/jsgcmark.cpp
index d1d06b6842c2..6bba419aaab5 100644
--- a/js/src/jsgcmark.cpp
+++ b/js/src/jsgcmark.cpp
@@ -230,14 +230,27 @@ MarkShape(JSTracer *trc, const MarkablePtr<Shape> &shape, const char *name
     MarkShapeUnbarriered(trc, shape.value, name);
 }
 
+void
+MarkBaseShapeUnbarriered(JSTracer *trc, BaseShape *base, const char *name)
+{
+    JS_ASSERT(trc);
+    JS_ASSERT(base);
+    JS_SET_TRACING_NAME(trc, name);
+    Mark(trc, base);
+}
+
+void
+MarkBaseShape(JSTracer *trc, const MarkablePtr<BaseShape> &base, const char *name)
+{
+    MarkBaseShapeUnbarriered(trc, base.value, name);
+}
+
 void
 MarkTypeObjectUnbarriered(JSTracer *trc, types::TypeObject *type, const char *name)
 {
     JS_ASSERT(trc);
     JS_ASSERT(type);
     JS_SET_TRACING_NAME(trc, name);
-    if (type == &types::emptyTypeObject)
-        return;
     Mark(trc, type);
 
     /*
@@ -247,7 +260,7 @@ MarkTypeObjectUnbarriered(JSTracer *trc, types::TypeObject *type, const char *na
      * members, and we don't need to handle them here.
      */
     if (IS_GC_MARKING_TRACER(trc)) {
-        if (type->singleton)
+        if (type->singleton && !type->lazy())
             MarkObject(trc, type->singleton, "type_singleton");
         if (type->interpretedFunction)
             MarkObject(trc, type->interpretedFunction, "type_function");
@@ -349,6 +362,20 @@ PushMarkStack(GCMarker *gcmarker, const Shape *thing)
         ScanShape(gcmarker, thing);
 }
 
+static void
+ScanBaseShape(GCMarker *gcmarker, BaseShape *base);
+
+void
+PushMarkStack(GCMarker *gcmarker, BaseShape *thing)
+{
+    JS_OPT_ASSERT_IF(gcmarker->context->runtime->gcCurrentCompartment,
+                     thing->compartment() == gcmarker->context->runtime->gcCurrentCompartment);
+
+    /* We mark base shapes directly rather than pushing on the stack. */
+    if (thing->markIfUnmarked(gcmarker->getMarkColor()))
+        ScanBaseShape(gcmarker, thing);
+}
+
 static void
 MarkAtomRange(JSTracer *trc, size_t len, JSAtom **vec, const char *name)
 {
@@ -447,6 +474,9 @@ MarkKind(JSTracer *trc, void *thing, JSGCTraceKind kind)
       case JSTRACE_SHAPE:
         Mark(trc, reinterpret_cast<Shape *>(thing));
         break;
+      case JSTRACE_BASE_SHAPE:
+        Mark(trc, reinterpret_cast<BaseShape *>(thing));
+        break;
       case JSTRACE_TYPE_OBJECT:
         MarkTypeObjectUnbarriered(trc, reinterpret_cast<types::TypeObject *>(thing), "type_stack");
         break;
@@ -665,17 +695,19 @@ PrintPropertyGetterOrSetter(JSTracer *trc, char *buf, size_t bufsize)
 {
     JS_ASSERT(trc->debugPrinter == PrintPropertyGetterOrSetter);
     Shape *shape = (Shape *)trc->debugPrintArg;
-    PrintPropertyId(buf, bufsize, shape->propid,
+    PrintPropertyId(buf, bufsize, shape->propid(),
                     trc->debugPrintIndex ? js_setter_str : js_getter_str);
 }
 
+#ifdef DEBUG
 static void
 PrintPropertyMethod(JSTracer *trc, char *buf, size_t bufsize)
 {
     JS_ASSERT(trc->debugPrinter == PrintPropertyMethod);
     Shape *shape = (Shape *)trc->debugPrintArg;
-    PrintPropertyId(buf, bufsize, shape->propid, " method");
+    PrintPropertyId(buf, bufsize, shape->propid(), " method");
 }
+#endif /* DEBUG */
 
 static inline void
 ScanValue(GCMarker *gcmarker, const Value &v)
@@ -695,28 +727,35 @@
 static void
 ScanShape(GCMarker *gcmarker, const Shape *shape)
 {
   restart:
-    JSRuntime *rt = gcmarker->runtime;
-    if (rt->gcRegenShapes)
-        shape->shapeid = js_RegenerateShapeForGC(rt);
+    PushMarkStack(gcmarker, shape->base());
 
-    if (JSID_IS_STRING(shape->propid))
-        PushMarkStack(gcmarker, JSID_TO_STRING(shape->propid));
-    else if (JS_UNLIKELY(JSID_IS_OBJECT(shape->propid)))
-        PushMarkStack(gcmarker, JSID_TO_OBJECT(shape->propid));
-
-    if (shape->hasGetterValue() && shape->getter())
-        PushMarkStack(gcmarker, shape->getterObject());
-    if (shape->hasSetterValue() && shape->setter())
-        PushMarkStack(gcmarker, shape->setterObject());
-
-    if (shape->isMethod())
-        PushMarkStack(gcmarker, &shape->methodObject());
+    jsid id = shape->maybePropid();
+    if (JSID_IS_STRING(id))
+        PushMarkStack(gcmarker, JSID_TO_STRING(id));
+    else if (JS_UNLIKELY(JSID_IS_OBJECT(id)))
+        PushMarkStack(gcmarker, JSID_TO_OBJECT(id));
 
     shape = shape->previous();
     if (shape && shape->markIfUnmarked(gcmarker->getMarkColor()))
         goto restart;
 }
 
+static void
+ScanBaseShape(GCMarker *gcmarker, BaseShape *base)
+{
+    if (base->hasGetterObject())
+        PushMarkStack(gcmarker, base->getterObject());
+
+    if (base->hasSetterObject())
+        PushMarkStack(gcmarker, base->setterObject());
+
+    if (base->isOwned())
+        PushMarkStack(gcmarker, base->baseUnowned());
+
+    if (JSObject *parent = base->getObjectParent())
+        PushMarkStack(gcmarker, parent);
+}
+
 static
inline void ScanRope(GCMarker *gcmarker, JSRope *rope) { @@ -762,21 +801,14 @@ static const uintN LARGE_OBJECT_CHUNK_SIZE = 2048; static void ScanObject(GCMarker *gcmarker, JSObject *obj) { - if (obj->isNewborn()) - return; - types::TypeObject *type = obj->typeFromGC(); - if (type != &types::emptyTypeObject) - PushMarkStack(gcmarker, type); + PushMarkStack(gcmarker, type); - if (JSObject *parent = obj->getParent()) - PushMarkStack(gcmarker, parent); + js::Shape *shape = obj->lastProperty(); + PushMarkStack(gcmarker, shape); - /* - * Call the trace hook if necessary, and check for a newType on objects - * which are not dense arrays (dense arrays have trace hooks). - */ - Class *clasp = obj->getClass(); + /* Call the trace hook if necessary. */ + Class *clasp = shape->getObjectClass(); if (clasp->trace) { if (clasp == &ArrayClass) { if (obj->getDenseArrayInitializedLength() > LARGE_OBJECT_CHUNK_SIZE) { @@ -786,31 +818,12 @@ ScanObject(GCMarker *gcmarker, JSObject *obj) clasp->trace(gcmarker, obj); } } else { - if (obj->newType) - PushMarkStack(gcmarker, obj->newType); clasp->trace(gcmarker, obj); } - } else { - if (obj->newType) - PushMarkStack(gcmarker, obj->newType); } - if (obj->isNative()) { - js::Shape *shape = obj->lastProp; - PushMarkStack(gcmarker, shape); - - if (gcmarker->runtime->gcRegenShapes) { - /* We need to regenerate our shape if hasOwnShape(). */ - uint32 newShape = shape->shapeid; - if (obj->hasOwnShape()) { - newShape = js_RegenerateShapeForGC(gcmarker->runtime); - JS_ASSERT(newShape != shape->shapeid); - } - obj->objShape = newShape; - } - + if (shape->isNative()) { uint32 nslots = obj->slotSpan(); - JS_ASSERT(obj->slotSpan() <= obj->numSlots()); if (nslots > LARGE_OBJECT_CHUNK_SIZE) { if (gcmarker->largeStack.push(LargeMarkItem(obj))) return; @@ -851,26 +864,16 @@ ScanLargeObject(GCMarker *gcmarker, LargeMarkItem &item) void MarkChildren(JSTracer *trc, JSObject *obj) { - /* If obj has no map, it must be a newborn. */ - if (obj->isNewborn()) - return; - MarkTypeObject(trc, obj->typeFromGC(), "type"); - /* Trace universal (ops-independent) members. 
*/ - if (!obj->isDenseArray() && obj->newType) - MarkTypeObject(trc, obj->newType, "new_type"); - if (obj->parent) - MarkObject(trc, obj->parent, "parent"); + Shape *shape = obj->lastProperty(); + MarkShapeUnbarriered(trc, shape, "shape"); - Class *clasp = obj->getClass(); + Class *clasp = shape->getObjectClass(); if (clasp->trace) clasp->trace(trc, obj); - if (obj->isNative()) { - MarkShape(trc, obj->lastProp, "shape"); - - JS_ASSERT(obj->slotSpan() <= obj->numSlots()); + if (shape->isNative()) { uint32 nslots = obj->slotSpan(); for (uint32 i = 0; i < nslots; i++) { JS_SET_TRACING_DETAILS(trc, js_PrintObjectSlotName, obj, i); @@ -921,6 +924,9 @@ MarkChildren(JSTracer *trc, JSScript *script) MarkValueRange(trc, constarray->length, constarray->vector, "consts"); } + if (script->function()) + MarkObjectUnbarriered(trc, script->function(), "function"); + if (!script->isCachedEval && script->globalObject) MarkObject(trc, script->globalObject, "object"); @@ -937,24 +943,34 @@ void MarkChildren(JSTracer *trc, const Shape *shape) { restart: - MarkId(trc, shape->propid, "propid"); - - if (shape->hasGetterValue() && shape->getter()) - MarkObjectWithPrinterUnbarriered(trc, shape->getterObject(), - PrintPropertyGetterOrSetter, shape, 0); - if (shape->hasSetterValue() && shape->setter()) - MarkObjectWithPrinterUnbarriered(trc, shape->setterObject(), - PrintPropertyGetterOrSetter, shape, 1); - - if (shape->isMethod()) - MarkObjectWithPrinterUnbarriered(trc, &shape->methodObject(), - PrintPropertyMethod, shape, 0); + MarkBaseShapeUnbarriered(trc, shape->base(), "base"); + MarkIdUnbarriered(trc, shape->maybePropid(), "propid"); shape = shape->previous(); if (shape) goto restart; } +void +MarkChildren(JSTracer *trc, BaseShape *base) +{ + if (base->hasGetterObject()) { + MarkObjectWithPrinterUnbarriered(trc, base->getterObject(), + PrintPropertyGetterOrSetter, base, 0); + } + + if (base->hasSetterObject()) { + MarkObjectWithPrinterUnbarriered(trc, base->setterObject(), + PrintPropertyGetterOrSetter, base, 0); + } + + if (base->isOwned()) + MarkBaseShapeUnbarriered(trc, base->baseUnowned(), "base"); + + if (JSObject *parent = base->getObjectParent()) + MarkObjectUnbarriered(trc, parent, "parent"); +} + static void ScanTypeObject(GCMarker *gcmarker, types::TypeObject *type) { @@ -967,13 +983,6 @@ ScanTypeObject(GCMarker *gcmarker, types::TypeObject *type) } } - if (type->emptyShapes) { - for (unsigned i = 0; i < FINALIZE_OBJECT_LIMIT; i++) { - if (type->emptyShapes[i]) - PushMarkStack(gcmarker, type->emptyShapes[i]); - } - } - if (type->proto) PushMarkStack(gcmarker, type->proto); @@ -982,12 +991,15 @@ ScanTypeObject(GCMarker *gcmarker, types::TypeObject *type) PushMarkStack(gcmarker, type->newScript->shape); } + if (type->interpretedFunction) + PushMarkStack(gcmarker, type->interpretedFunction); + /* - * Don't need to trace singleton or functionScript, an object with this - * type must have already been traced and it will also hold a reference - * on the script (singleton and functionScript types cannot be the newType - * of another object). Attempts to mark type objects directly must use - * MarkTypeObject, which will itself mark these extra bits. + * Don't need to trace singleton, an object with this type must have + * already been traced and it will also hold a reference on the script + * (singleton and functionScript types cannot be the newType of another + * object). Attempts to mark type objects directly must use MarkTypeObject, + * which will itself mark these extra bits. 
      */
 }
 
@@ -1003,17 +1015,10 @@ MarkChildren(JSTracer *trc, types::TypeObject *type)
         }
     }
 
-    if (type->emptyShapes) {
-        for (unsigned i = 0; i < FINALIZE_OBJECT_LIMIT; i++) {
-            if (type->emptyShapes[i])
-                MarkShape(trc, type->emptyShapes[i], "empty_shape");
-        }
-    }
-
     if (type->proto)
         MarkObject(trc, type->proto, "type_proto");
 
-    if (type->singleton)
+    if (type->singleton && !type->lazy())
         MarkObject(trc, type->singleton, "type_singleton");
 
     if (type->newScript) {
@@ -1092,6 +1097,10 @@ TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind)
         MarkChildren(trc, static_cast<Shape *>(thing));
         break;
 
+      case JSTRACE_BASE_SHAPE:
+        MarkChildren(trc, static_cast<BaseShape *>(thing));
+        break;
+
       case JSTRACE_TYPE_OBJECT:
         MarkChildren(trc, (types::TypeObject *)thing);
         break;
@@ -1120,7 +1129,6 @@ JSObject::scanSlots(GCMarker *gcmarker)
      * Scan the fixed slots and the dynamic slots separately, to avoid
      * branching inside nativeGetSlot().
      */
-    JS_ASSERT(slotSpan() <= numSlots());
     unsigned i, nslots = slotSpan();
     if (slots) {
         unsigned nfixed = numFixedSlots();
diff --git a/js/src/jsgcmark.h b/js/src/jsgcmark.h
index ef8cd5a27245..23240e0ebcfd 100644
--- a/js/src/jsgcmark.h
+++ b/js/src/jsgcmark.h
@@ -81,6 +81,9 @@ MarkShapeUnbarriered(JSTracer *trc, const Shape *shape, const char *name);
 void
 MarkShape(JSTracer *trc, const MarkablePtr<Shape> &shape, const char *name);
 
+void
+MarkBaseShapeUnbarriered(JSTracer *trc, BaseShape *shape, const char *name);
+
 void
 MarkTypeObjectUnbarriered(JSTracer *trc, types::TypeObject *type, const char *name);
 
diff --git a/js/src/jsgcstats.cpp b/js/src/jsgcstats.cpp
index f36172d25e5a..ef56cd12d0fc 100644
--- a/js/src/jsgcstats.cpp
+++ b/js/src/jsgcstats.cpp
@@ -134,6 +134,10 @@ GCMarker::dumpConservativeRoots()
             fprintf(fp, "shape");
             break;
           }
+          case JSTRACE_BASE_SHAPE: {
+            fprintf(fp, "base_shape");
+            break;
+          }
           case JSTRACE_TYPE_OBJECT: {
             fprintf(fp, "type_object");
             break;
diff --git a/js/src/jsinfer.cpp b/js/src/jsinfer.cpp
index c033a98d4154..0b0e14f6464a 100644
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -795,7 +795,7 @@ static inline const Shape *
 GetSingletonShape(JSContext *cx, JSObject *obj, jsid id)
 {
     const Shape *shape = obj->nativeLookup(cx, id);
-    if (shape && shape->hasDefaultGetterOrIsMethod() && shape->slot != SHAPE_INVALID_SLOT)
+    if (shape && shape->hasDefaultGetterOrIsMethod() && shape->hasSlot())
         return shape;
     return NULL;
 }
@@ -814,7 +814,7 @@ ScriptAnalysis::pruneTypeBarriers(JSContext *cx, uint32 offset)
         if (barrier->singleton) {
             JS_ASSERT(barrier->type.isPrimitive(JSVAL_TYPE_UNDEFINED));
             const Shape *shape = GetSingletonShape(cx, barrier->singleton, barrier->singletonId);
-            if (shape && !barrier->singleton->nativeGetSlot(shape->slot).isUndefined()) {
+            if (shape && !barrier->singleton->nativeGetSlot(shape->slot()).isUndefined()) {
                 /*
                  * When we analyzed the script the singleton had an 'own'
                  * property which was undefined (probably a 'var' variable
@@ -1046,7 +1046,7 @@ PropertyAccess(JSContext *cx, JSScript *script, jsbytecode *pc, TypeObject *obje
              * even if no undefined value is ever observed at pc.
*/ const Shape *shape = GetSingletonShape(cx, object->singleton, id); - if (shape && object->singleton->nativeGetSlot(shape->slot).isUndefined()) + if (shape && object->singleton->nativeGetSlot(shape->slot()).isUndefined()) script->analysis()->addSingletonTypeBarrier(cx, pc, target, object->singleton, id); } } else { @@ -1164,7 +1164,7 @@ TypeConstraintCall::newType(JSContext *cx, TypeSet *source, Type type) return; } - if (obj->getFunctionPrivate()->isNative()) { + if (obj->toFunction()->isNative()) { /* * The return value and all side effects within native calls should * be dynamically monitored, except when the compiler is generating @@ -1180,7 +1180,7 @@ TypeConstraintCall::newType(JSContext *cx, TypeSet *source, Type type) * which specializes particular natives. */ - Native native = obj->getFunctionPrivate()->native(); + Native native = obj->toFunction()->native(); if (native == js::array_push) { for (size_t i = 0; i < callsite->argumentCount; i++) { @@ -1210,7 +1210,7 @@ TypeConstraintCall::newType(JSContext *cx, TypeSet *source, Type type) return; } - callee = obj->getFunctionPrivate(); + callee = obj->toFunction(); } else if (type.isTypeObject()) { callee = type.typeObject()->interpretedFunction; if (!callee) @@ -1220,7 +1220,7 @@ TypeConstraintCall::newType(JSContext *cx, TypeSet *source, Type type) return; } - if (!callee->script()->ensureHasTypes(cx, callee)) + if (!callee->script()->ensureHasTypes(cx)) return; unsigned nargs = callee->nargs; @@ -1283,9 +1283,9 @@ TypeConstraintPropagateThis::newType(JSContext *cx, TypeSet *source, Type type) if (type.isSingleObject()) { JSObject *object = type.singleObject(); - if (!object->isFunction() || !object->getFunctionPrivate()->isInterpreted()) + if (!object->isFunction() || !object->toFunction()->isInterpreted()) return; - callee = object->getFunctionPrivate(); + callee = object->toFunction(); } else if (type.isTypeObject()) { TypeObject *object = type.typeObject(); if (!object->interpretedFunction) @@ -1296,7 +1296,7 @@ TypeConstraintPropagateThis::newType(JSContext *cx, TypeSet *source, Type type) return; } - if (!callee->script()->ensureHasTypes(cx, callee)) + if (!callee->script()->ensureHasTypes(cx)) return; TypeSet *thisTypes = TypeScript::ThisTypes(callee->script()); @@ -1676,7 +1676,7 @@ types::MarkArgumentsCreated(JSContext *cx, JSScript *script) mjit::ExpandInlineFrames(cx->compartment); #endif - if (!script->ensureRanAnalysis(cx)) + if (!script->ensureRanAnalysis(cx, NULL)) return; ScriptAnalysis *analysis = script->analysis(); @@ -1961,8 +1961,6 @@ TypeSet::needsBarrier(JSContext *cx) // TypeCompartment ///////////////////////////////////////////////////////////////////// -TypeObject types::emptyTypeObject(NULL, false, true); - void TypeCompartment::init(JSContext *cx) { @@ -1976,7 +1974,7 @@ TypeObject * TypeCompartment::newTypeObject(JSContext *cx, JSScript *script, JSProtoKey key, JSObject *proto, bool unknown) { - TypeObject *object = NewGCThing(cx, gc::FINALIZE_TYPE_OBJECT, sizeof(TypeObject)); + TypeObject *object = gc::NewGCThing(cx, gc::FINALIZE_TYPE_OBJECT, sizeof(TypeObject)); if (!object) return NULL; new(object) TypeObject(proto, key == JSProto_Function, unknown); @@ -2270,7 +2268,7 @@ TypeCompartment::monitorBytecode(JSContext *cx, JSScript *script, uint32 offset, cx->compartment->types.addPendingRecompile(cx, script); /* Trigger recompilation of any inline callers. 
*/ - if (script->hasFunction && !script->function()->hasLazyType()) + if (script->function() && !script->function()->hasLazyType()) ObjectStateChange(cx, script->function()->type(), false, true); } @@ -2365,7 +2363,7 @@ ScriptAnalysis::addTypeBarrier(JSContext *cx, const jsbytecode *pc, TypeSet *tar cx->compartment->types.addPendingRecompile(cx, script); /* Trigger recompilation of any inline callers. */ - if (script->hasFunction && !script->function()->hasLazyType()) + if (script->function() && !script->function()->hasLazyType()) ObjectStateChange(cx, script->function()->type(), false, true); } @@ -2398,7 +2396,7 @@ ScriptAnalysis::addSingletonTypeBarrier(JSContext *cx, const jsbytecode *pc, Typ if (!code.typeBarriers) { /* Trigger recompilation as for normal type barriers. */ cx->compartment->types.addPendingRecompile(cx, script); - if (script->hasFunction && !script->function()->hasLazyType()) + if (script->function() && !script->function()->hasLazyType()) ObjectStateChange(cx, script->function()->type(), false, true); } @@ -2480,7 +2478,6 @@ GetValueTypeForTable(JSContext *cx, const Value &v) { Type type = GetValueType(cx, v); JS_ASSERT(!type.isSingleObject()); - JS_ASSERT_IF(type.isTypeObject(), type.typeObject() != &emptyTypeObject); return type; } @@ -2524,7 +2521,7 @@ TypeCompartment::fixArrayType(JSContext *cx, JSObject *obj) * If the array is heterogenous, keep the existing type object, which has * unknown properties. */ - JS_ASSERT(obj->isPackedDenseArray()); + JS_ASSERT(obj->isDenseArray()); unsigned len = obj->getDenseArrayInitializedLength(); if (len == 0) @@ -2584,7 +2581,7 @@ struct types::ObjectTableKey typedef JSObject * Lookup; static inline uint32 hash(JSObject *obj) { - return (uint32) (JSID_BITS(obj->lastProperty()->propid.get()) ^ + return (uint32) (JSID_BITS(obj->lastProperty()->propid()) ^ obj->slotSpan() ^ obj->numFixedSlots() ^ ((uint32)(size_t)obj->getProto() >> 2)); } @@ -2596,8 +2593,8 @@ struct types::ObjectTableKey return false; } const Shape *shape = obj->lastProperty(); - while (!JSID_IS_EMPTY(shape->propid)) { - if (shape->propid != v.ids[shape->slot]) + while (!shape->isEmptyShape()) { + if (shape->propid() != v.ids[shape->slot()]) return false; shape = shape->previous(); } @@ -2649,11 +2646,11 @@ TypeCompartment::fixObjectType(JSContext *cx, JSObject *obj) if (types[i].isPrimitive(JSVAL_TYPE_INT32)) { types[i] = Type::DoubleType(); const Shape *shape = baseShape; - while (!JSID_IS_EMPTY(shape->propid)) { - if (shape->slot == i) { + while (!shape->isEmptyShape()) { + if (shape->slot() == i) { Type type = Type::DoubleType(); if (!p->value.object->unknownProperties()) { - jsid id = MakeTypeId(cx, shape->propid); + jsid id = MakeTypeId(cx, shape->propid()); p->value.object->addPropertyType(cx, id, type); } break; @@ -2689,12 +2686,12 @@ TypeCompartment::fixObjectType(JSContext *cx, JSObject *obj) } const Shape *shape = baseShape; - while (!JSID_IS_EMPTY(shape->propid)) { - ids[shape->slot] = shape->propid; - types[shape->slot] = GetValueTypeForTable(cx, obj->getSlot(shape->slot)); + while (!shape->isEmptyShape()) { + ids[shape->slot()] = shape->propid(); + types[shape->slot()] = GetValueTypeForTable(cx, obj->getSlot(shape->slot())); if (!objType->unknownProperties()) { - jsid id = MakeTypeId(cx, shape->propid); - objType->addPropertyType(cx, id, types[shape->slot]); + jsid id = MakeTypeId(cx, shape->propid()); + objType->addPropertyType(cx, id, types[shape->slot()]); } shape = shape->previous(); } @@ -2754,8 +2751,8 @@ UpdatePropertyType(JSContext *cx, 
TypeSet *types, JSObject *obj, const Shape *sh { if (shape->hasGetterValue() || shape->hasSetterValue()) { types->addType(cx, Type::UnknownType()); - } else if (shape->hasDefaultGetterOrIsMethod() && shape->slot != SHAPE_INVALID_SLOT) { - const Value &value = obj->nativeGetSlot(shape->slot); + } else if (shape->hasDefaultGetterOrIsMethod() && shape->hasSlot()) { + const Value &value = obj->nativeGetSlot(shape->slot()); /* * Don't add initial undefined types for singleton properties that are @@ -2791,12 +2788,12 @@ TypeObject::addProperty(JSContext *cx, jsid id, Property **pprop) if (JSID_IS_VOID(id)) { /* Go through all shapes on the object to get integer-valued properties. */ const Shape *shape = singleton->lastProperty(); - while (!JSID_IS_EMPTY(shape->propid)) { - if (JSID_IS_VOID(MakeTypeId(cx, shape->propid))) + while (!shape->isEmptyShape()) { + if (JSID_IS_VOID(MakeTypeId(cx, shape->propid()))) UpdatePropertyType(cx, &base->types, singleton, shape, true); shape = shape->previous(); } - } else { + } else if (!JSID_IS_EMPTY(id)) { const Shape *shape = singleton->nativeLookup(cx, id); if (shape) UpdatePropertyType(cx, &base->types, singleton, shape, false); @@ -2830,14 +2827,14 @@ TypeObject::addDefiniteProperties(JSContext *cx, JSObject *obj) AutoEnterTypeInference enter(cx); const Shape *shape = obj->lastProperty(); - while (!JSID_IS_EMPTY(shape->propid)) { - jsid id = MakeTypeId(cx, shape->propid); - if (!JSID_IS_VOID(id) && obj->isFixedSlot(shape->slot) && - shape->slot <= (TYPE_FLAG_DEFINITE_MASK >> TYPE_FLAG_DEFINITE_SHIFT)) { + while (!shape->isEmptyShape()) { + jsid id = MakeTypeId(cx, shape->propid()); + if (!JSID_IS_VOID(id) && obj->isFixedSlot(shape->slot()) && + shape->slot() <= (TYPE_FLAG_DEFINITE_MASK >> TYPE_FLAG_DEFINITE_SHIFT)) { TypeSet *types = getProperty(cx, id, true); if (!types) return false; - types->setDefinite(shape->slot); + types->setDefinite(shape->slot()); } shape = shape->previous(); } @@ -2858,8 +2855,8 @@ TypeObject::matchDefiniteProperties(JSObject *obj) bool found = false; const Shape *shape = obj->lastProperty(); - while (!JSID_IS_EMPTY(shape->propid)) { - if (shape->slot == slot && shape->propid == prop->id) { + while (!shape->isEmptyShape()) { + if (shape->slot() == slot && shape->propid() == prop->id) { found = true; break; } @@ -2970,7 +2967,7 @@ TypeObject::setFlags(JSContext *cx, TypeObjectFlags flags) JS_ASSERT_IF(flags & OBJECT_FLAG_REENTRANT_FUNCTION, interpretedFunction->script()->reentrantOuterFunction); JS_ASSERT_IF(flags & OBJECT_FLAG_ITERATED, - singleton->flags & JSObject::ITERATED); + singleton->lastProperty()->hasObjectFlag(BaseShape::ITERATED_SINGLETON)); } this->flags |= flags; @@ -3258,7 +3255,7 @@ ScriptAnalysis::resolveNameAccess(JSContext *cx, jsid id, bool addDependency) JSAtom *atom = JSID_TO_ATOM(id); JSScript *script = this->script; - while (script->hasFunction && script->nesting()) { + while (script->function() && script->nesting()) { if (!script->ensureRanInference(cx)) return access; @@ -3407,7 +3404,6 @@ ScriptAnalysis::analyzeTypesBytecode(JSContext *cx, unsigned offset, case JSOP_DEFAULT: case JSOP_DEFAULTX: case JSOP_POPN: - case JSOP_UNBRANDTHIS: case JSOP_STARTXML: case JSOP_STARTXMLEXPR: case JSOP_DEFXMLNS: @@ -3505,7 +3501,7 @@ ScriptAnalysis::analyzeTypesBytecode(JSContext *cx, unsigned offset, case JSOP_STOP: /* If a stop is reachable then the return type may be void. 
*/ - if (script->hasFunction) + if (script->function()) TypeScript::ReturnTypes(script)->addType(cx, Type::UndefinedType()); break; @@ -3788,7 +3784,7 @@ ScriptAnalysis::analyzeTypesBytecode(JSContext *cx, unsigned offset, case JSOP_RETURN: case JSOP_SETRVAL: - if (script->hasFunction) + if (script->function()) poppedTypes(pc, 0)->addSubset(cx, TypeScript::ReturnTypes(script)); break; @@ -4041,12 +4037,8 @@ ScriptAnalysis::analyzeTypesBytecode(JSContext *cx, unsigned offset, poppedTypes(pc, 1)->addSubset(cx, &pushed[0]); break; - case JSOP_UNBRAND: - poppedTypes(pc, 0)->addSubset(cx, &pushed[0]); - break; - case JSOP_GENERATOR: - if (script->hasFunction) { + if (script->function()) { if (script->hasGlobal()) { JSObject *proto = script->global()->getOrCreateGeneratorPrototype(cx); if (!proto) @@ -4163,7 +4155,7 @@ ScriptAnalysis::analyzeTypes(JSContext *cx) for (unsigned i = 0; i < script->nfixed; i++) TypeScript::LocalTypes(script, i)->addType(cx, Type::UndefinedType()); - TypeScriptNesting *nesting = script->hasFunction ? script->nesting() : NULL; + TypeScriptNesting *nesting = script->function() ? script->nesting() : NULL; if (nesting && nesting->parent) { /* * Check whether NAME accesses can be resolved in parent scopes, and @@ -4658,11 +4650,11 @@ AnalyzeNewScriptProperties(JSContext *cx, TypeObject *type, JSFunction *fun, JSO JSObject *funcallObj = funcallTypes->getSingleton(cx, false); JSObject *scriptObj = scriptTypes->getSingleton(cx, false); if (!funcallObj || !scriptObj || !scriptObj->isFunction() || - !scriptObj->getFunctionPrivate()->isInterpreted()) { + !scriptObj->toFunction()->isInterpreted()) { return false; } - JSFunction *function = scriptObj->getFunctionPrivate(); + JSFunction *function = scriptObj->toFunction(); JS_ASSERT(!function->script()->isInnerFunction); /* @@ -4850,7 +4842,7 @@ ScriptAnalysis::printTypes(JSContext *cx) #ifdef DEBUG - if (script->hasFunction) + if (script->function()) printf("Function"); else if (script->isCachedEval) printf("Eval"); @@ -4864,7 +4856,7 @@ ScriptAnalysis::printTypes(JSContext *cx) printf("\n this:"); TypeScript::ThisTypes(script)->print(cx); - for (unsigned i = 0; script->hasFunction && i < script->function()->nargs; i++) { + for (unsigned i = 0; script->function() && i < script->function()->nargs; i++) { printf("\n arg%u:", i); TypeScript::ArgTypes(script, i)->print(cx); } @@ -4989,7 +4981,7 @@ MarkIteratorUnknownSlow(JSContext *cx) } /* Trigger recompilation of any inline callers. */ - if (script->hasFunction && !script->function()->hasLazyType()) + if (script->function() && !script->function()->hasLazyType()) ObjectStateChange(cx, script->function()->type(), false, true); } @@ -4997,8 +4989,8 @@ void TypeMonitorCallSlow(JSContext *cx, JSObject *callee, const CallArgs &args, bool constructing) { - unsigned nargs = callee->getFunctionPrivate()->nargs; - JSScript *script = callee->getFunctionPrivate()->script(); + unsigned nargs = callee->toFunction()->nargs; + JSScript *script = callee->toFunction()->script(); if (!constructing) TypeScript::SetThis(cx, script, args.thisv()); @@ -5034,7 +5026,7 @@ TypeDynamicResult(JSContext *cx, JSScript *script, jsbytecode *pc, Type type) /* Directly update associated type sets for applicable bytecodes. 
*/ if (js_CodeSpec[*pc].format & JOF_TYPESET) { - if (!script->ensureRanAnalysis(cx)) { + if (!script->ensureRanAnalysis(cx, NULL)) { cx->compartment->types.setPendingNukeTypes(cx); return; } @@ -5127,7 +5119,7 @@ TypeDynamicResult(JSContext *cx, JSScript *script, jsbytecode *pc, Type type) } /* Trigger recompilation of any inline callers. */ - if (script->hasFunction && !script->function()->hasLazyType()) + if (script->function() && !script->function()->hasLazyType()) ObjectStateChange(cx, script->function()->type(), false, true); } @@ -5142,7 +5134,7 @@ TypeMonitorResult(JSContext *cx, JSScript *script, jsbytecode *pc, const js::Val AutoEnterTypeInference enter(cx); - if (!script->ensureRanAnalysis(cx)) { + if (!script->ensureRanAnalysis(cx, NULL)) { cx->compartment->types.setPendingNukeTypes(cx); return; } @@ -5162,9 +5154,8 @@ TypeScript::SetScope(JSContext *cx, JSScript *script, JSObject *scope) { JS_ASSERT(script->types && !script->types->hasScope()); - JSFunction *fun = script->types->function; + JSFunction *fun = script->function(); - JS_ASSERT(script->hasFunction == (fun != NULL)); JS_ASSERT_IF(!fun, !script->isOuterFunction && !script->isInnerFunction); JS_ASSERT_IF(!scope, fun && !script->isInnerFunction); @@ -5183,6 +5174,14 @@ TypeScript::SetScope(JSContext *cx, JSScript *script, JSObject *scope) JS_ASSERT_IF(fun && scope, fun->getGlobal() == scope->getGlobal()); script->types->global = fun ? fun->getGlobal() : scope->getGlobal(); + /* + * Update the parent in the script's bindings. The bindings are created + * with a NULL parent, and fixing the parent now avoids the need to reshape + * every time a call object is created from the bindings. + */ + if (!script->bindings.setParent(cx, script->types->global)) + return false; + if (!cx->typeInferenceEnabled()) return true; @@ -5204,7 +5203,7 @@ TypeScript::SetScope(JSContext *cx, JSScript *script, JSObject *scope) * the script is nested inside. */ while (!scope->isCall()) - scope = scope->getParent(); + scope = scope->internalScopeChain(); CallObject &call = scope->asCall(); @@ -5234,10 +5233,10 @@ TypeScript::SetScope(JSContext *cx, JSScript *script, JSObject *scope) * the parent's call object as the most recent one, so that it is not * marked as reentrant. */ - if (!parent->ensureHasTypes(cx, parentFun)) + if (!parent->ensureHasTypes(cx)) return false; if (!parent->types->hasScope()) { - if (!SetScope(cx, parent, scope->getParent())) + if (!SetScope(cx, parent, scope->internalScopeChain())) return false; parent->nesting()->activeCall = scope; parent->nesting()->argArray = Valueify(call.argArray()); @@ -5332,7 +5331,7 @@ CheckNestingParent(JSContext *cx, JSObject *scope, JSScript *script) JS_ASSERT(parent); while (!scope->isCall() || scope->asCall().getCalleeFunction()->script() != parent) - scope = scope->getParent(); + scope = scope->internalScopeChain(); if (scope != parent->nesting()->activeCall) { parent->reentrantOuterFunction = true; @@ -5347,7 +5346,7 @@ CheckNestingParent(JSContext *cx, JSObject *scope, JSScript *script) * parent. 
*/ if (parent->nesting()->parent) { - scope = scope->getParent(); + scope = scope->internalScopeChain(); script = parent; goto restart; } @@ -5475,23 +5474,21 @@ IgnorePushed(const jsbytecode *pc, unsigned index) } bool -JSScript::makeTypes(JSContext *cx, JSFunction *fun) +JSScript::makeTypes(JSContext *cx) { JS_ASSERT(!types); - JS_ASSERT(hasFunction == (fun != NULL)); if (!cx->typeInferenceEnabled()) { types = (TypeScript *) cx->calloc_(sizeof(TypeScript)); if (!types) return false; - new(types) TypeScript(fun); + new(types) TypeScript(); return true; } AutoEnterTypeInference enter(cx); - /* Open code for NumTypeSets since the types are not filled in yet. */ - unsigned count = 2 + (fun ? fun->nargs : 0) + nfixed + nTypeSets; + unsigned count = TypeScript::NumTypeSets(this); types = (TypeScript *) cx->calloc_(sizeof(TypeScript) + (sizeof(TypeSet) * count)); if (!types) { @@ -5499,7 +5496,7 @@ JSScript::makeTypes(JSContext *cx, JSFunction *fun) return false; } - new(types) TypeScript(fun); + new(types) TypeScript(); #ifdef DEBUG TypeSet *typeArray = types->typeArray(); @@ -5515,7 +5512,7 @@ JSScript::makeTypes(JSContext *cx, JSFunction *fun) InferSpew(ISpewOps, "typeSet: %sT%p%s this #%u", InferSpewColor(thisTypes), thisTypes, InferSpewColorReset(), id()); - unsigned nargs = hasFunction ? function()->nargs : 0; + unsigned nargs = function() ? function()->nargs : 0; for (unsigned i = 0; i < nargs; i++) { TypeSet *types = TypeScript::ArgTypes(this, i); InferSpew(ISpewOps, "typeSet: %sT%p%s arg%u #%u", @@ -5558,9 +5555,7 @@ JSScript::makeAnalysis(JSContext *cx) bool JSScript::typeSetFunction(JSContext *cx, JSFunction *fun, bool singleton) { - hasFunction = true; - if (fun->isHeavyweight()) - isHeavyweightFunction = true; + function_ = fun; if (!cx->typeInferenceEnabled()) return true; @@ -5661,7 +5656,7 @@ JSObject::splicePrototype(JSContext *cx, JSObject *proto) } if (!cx->typeInferenceEnabled()) { - TypeObject *type = proto ? proto->getNewType(cx) : &emptyTypeObject; + TypeObject *type = proto ? 
proto->getNewType(cx) : cx->compartment->getEmptyType(cx); if (!type) return false; type_ = type; @@ -5707,8 +5702,8 @@ JSObject::makeLazyType(JSContext *cx) type->singleton = this; - if (isFunction() && getFunctionPrivate() && getFunctionPrivate()->isInterpreted()) { - type->interpretedFunction = getFunctionPrivate(); + if (isFunction() && toFunction()->isInterpreted()) { + type->interpretedFunction = toFunction(); JSScript *script = type->interpretedFunction->script(); if (script->createdArgs) type->flags |= OBJECT_FLAG_CREATED_ARGUMENTS; @@ -5718,7 +5713,7 @@ JSObject::makeLazyType(JSContext *cx) type->flags |= OBJECT_FLAG_REENTRANT_FUNCTION; } - if (flags & ITERATED) + if (lastProperty()->hasObjectFlag(BaseShape::ITERATED_SINGLETON)) type->flags |= OBJECT_FLAG_ITERATED; #if JS_HAS_XML_SUPPORT @@ -5730,12 +5725,11 @@ JSObject::makeLazyType(JSContext *cx) type->markUnknown(cx); #endif - if (clasp->ext.equality) + if (getClass()->ext.equality) type->flags |= OBJECT_FLAG_SPECIAL_EQUALITY; if (type->unknownProperties()) { type_ = type; - flags &= ~LAZY_TYPE; return; } @@ -5745,24 +5739,101 @@ JSObject::makeLazyType(JSContext *cx) | OBJECT_FLAG_NON_TYPED_ARRAY; type_ = type; - flags &= ~LAZY_TYPE; } -void -JSObject::makeNewType(JSContext *cx, JSFunction *fun, bool unknown) +/* static */ inline HashNumber +TypeObjectEntry::hash(JSObject *proto) { - JS_ASSERT(!newType); + return PointerHasher::hash(proto); +} + +/* static */ inline bool +TypeObjectEntry::match(TypeObject *key, JSObject *lookup) +{ + return key->proto == lookup; +} + +#ifdef DEBUG +bool +JSObject::hasNewType(TypeObject *type) +{ + TypeObjectSet &table = compartment()->newTypeObjects; + + if (!table.initialized()) + return false; + + TypeObjectSet::Ptr p = table.lookup(this); + return p && *p == type; +} +#endif /* DEBUG */ + +bool +JSObject::setNewTypeUnknown(JSContext *cx) +{ + if (!setFlag(cx, js::BaseShape::NEW_TYPE_UNKNOWN)) + return false; + + /* + * If the object already has a new type, mark that type as unknown. It will + * not have the SETS_MARKED_UNKNOWN bit set, so may require a type set + * crawl if prototypes of the object change dynamically in the future. + */ + TypeObjectSet &table = cx->compartment->newTypeObjects; + if (table.initialized()) { + if (TypeObjectSet::Ptr p = table.lookup(this)) + MarkTypeObjectUnknownProperties(cx, *p); + } + + return true; +} + +TypeObject * +JSObject::getNewType(JSContext *cx, JSFunction *fun) +{ + if (!setDelegate(cx)) + return NULL; + + TypeObjectSet &table = cx->compartment->newTypeObjects; + + if (!table.initialized() && !table.init()) + return NULL; + + TypeObjectSet::AddPtr p = table.lookupForAdd(this); + if (p) { + TypeObject *type = *p; + + /* + * If set, the type's newScript indicates the script used to create + * all objects in existence which have this type. If there are objects + * in existence which are not created by calling 'new' on newScript, + * we must clear the new script information from the type and will not + * be able to assume any definite properties for instances of the type. + * This case is rare, but can happen if, for example, two scripted + * functions have the same value for their 'prototype' property, or if + * Object.create is called with a prototype object that is also the + * 'prototype' property of some scripted function. 
+ */ + if (type->newScript && type->newScript->fun != fun) + type->clearNewScript(cx); + + if (cx->compartment->needsBarrier()) + TypeObject::readBarrier(type); + + return type; + } + + bool markUnknown = lastProperty()->hasObjectFlag(BaseShape::NEW_TYPE_UNKNOWN); TypeObject *type = cx->compartment->types.newTypeObject(cx, NULL, - JSProto_Object, this, unknown); + JSProto_Object, this, markUnknown); if (!type) - return; + return NULL; - newType.init(type); - setDelegate(); + if (!table.relookupOrAdd(p, this, type)) + return NULL; if (!cx->typeInferenceEnabled()) - return; + return type; AutoEnterTypeInference enter(cx); @@ -5783,7 +5854,7 @@ JSObject::makeNewType(JSContext *cx, JSFunction *fun, bool unknown) type->flags |= OBJECT_FLAG_UNKNOWN_MASK; #endif - if (clasp->ext.equality) + if (getClass()->ext.equality) type->flags |= OBJECT_FLAG_SPECIAL_EQUALITY; /* @@ -5796,6 +5867,40 @@ JSObject::makeNewType(JSContext *cx, JSFunction *fun, bool unknown) */ if (type->unknownProperties()) type->flags |= OBJECT_FLAG_SETS_MARKED_UNKNOWN; + + return type; +} + +TypeObject * +JSCompartment::getLazyType(JSContext *cx, JSObject *proto) +{ + TypeObjectSet &table = cx->compartment->lazyTypeObjects; + + if (!table.initialized() && !table.init()) + return NULL; + + TypeObjectSet::AddPtr p = table.lookupForAdd(proto); + if (p) { + TypeObject *type = *p; + JS_ASSERT(type->lazy()); + + if (cx->compartment->needsBarrier()) + TypeObject::readBarrier(type); + + return type; + } + + TypeObject *type = cx->compartment->types.newTypeObject(cx, NULL, + JSProto_Object, proto, false); + if (!type) + return NULL; + + if (!table.relookupOrAdd(p, proto, type)) + return NULL; + + type->singleton = (JSObject *) TypeObject::LAZY_SINGLETON; + + return type; } ///////////////////////////////////////////////////////////////////// @@ -5847,15 +5952,6 @@ TypeSet::sweep(JSContext *cx, JSCompartment *compartment) flags &= ~TYPE_FLAG_PROPAGATED_PROPERTY; } -inline void -JSObject::revertLazyType() -{ - JS_ASSERT(hasSingletonType() && !hasLazyType()); - JS_ASSERT_IF(type_->proto, type_->proto->newType); - flags |= LAZY_TYPE; - type_ = (type_->proto) ? type_->proto->newType : &emptyTypeObject; -} - inline void TypeObject::clearProperties() { @@ -5880,7 +5976,6 @@ TypeObject::sweep(JSContext *cx) contribution = 0; if (singleton) { - JS_ASSERT(!emptyShapes); JS_ASSERT(!newScript); /* @@ -5889,25 +5984,10 @@ TypeObject::sweep(JSContext *cx) */ clearProperties(); - if (!isMarked()) { - /* - * Singleton objects do not hold strong references on their types. - * When removing the type, however, we need to fixup the singleton - * so that it has a lazy type again. The generic 'new' type for the - * proto must be live, since the type's prototype and its 'new' - * type are both strong references. 
- */ - JS_ASSERT_IF(singleton->isMarked() && proto, - proto->isMarked() && proto->newType->isMarked()); - singleton->revertLazyType(); - } - return; } if (!isMarked()) { - if (emptyShapes) - Foreground::free_(emptyShapes); if (newScript) Foreground::free_(newScript); return; @@ -5991,9 +6071,6 @@ struct SweepTypeObjectOp void SweepTypeObjects(JSContext *cx, JSCompartment *compartment) { - JS_ASSERT(!emptyTypeObject.emptyShapes); - JS_ASSERT(!emptyTypeObject.newScript); - SweepTypeObjectOp op(cx); gc::ForEachArenaAndCell(compartment, gc::FINALIZE_TYPE_OBJECT, gc::EmptyArenaOp, op); } @@ -6077,6 +6154,18 @@ TypeCompartment::sweep(JSContext *cx) pendingCapacity = 0; } +void +JSCompartment::sweepNewTypeObjectTable(JSContext *cx, TypeObjectSet &table) +{ + if (table.initialized()) { + for (TypeObjectSet::Enum e(table); !e.empty(); e.popFront()) { + TypeObject *type = e.front(); + if (!type->isMarked()) + e.removeFront(); + } + } +} + TypeCompartment::~TypeCompartment() { if (pendingArray) @@ -6275,7 +6364,7 @@ JS_GetTypeInferenceObjectStats(void *object_, TypeInferenceMemoryStats *stats, J * every GC. The type object is normally destroyed too, but we don't * charge this to 'temporary' as this is not for GC heap values. */ - JS_ASSERT(!object->newScript && !object->emptyShapes); + JS_ASSERT(!object->newScript); return; } @@ -6290,12 +6379,6 @@ JS_GetTypeInferenceObjectStats(void *object_, TypeInferenceMemoryStats *stats, J stats->objects += mallocSizeOf(object->newScript, computedSize); } - if (object->emptyShapes) { - stats->emptyShapes += - mallocSizeOf(object->emptyShapes, - sizeof(EmptyShape*) * gc::FINALIZE_OBJECT_LIMIT); - } - /* * This counts memory that is in the temp pool but gets attributed * elsewhere. See JS_GetTypeInferenceMemoryStats for more details. diff --git a/js/src/jsinfer.h b/js/src/jsinfer.h index b760af2b8675..9341ad89cbbd 100644 --- a/js/src/jsinfer.h +++ b/js/src/jsinfer.h @@ -731,19 +731,16 @@ struct TypeObject : gc::Cell */ HeapPtrObject singleton; - /* Lazily filled array of empty shapes for each size of objects with this type. */ - HeapPtr *emptyShapes; + /* + * Value held by singleton if this is a standin type for a singleton JS + * object whose type has not been constructed yet. + */ + static const size_t LAZY_SINGLETON = 1; + bool lazy() const { return singleton == (JSObject *) LAZY_SINGLETON; } /* Flags for this object. */ TypeObjectFlags flags; - /* - * If non-NULL, objects of this type have always been constructed using - * 'new' on the specified script, which adds some number of properties to - * the object in a definite order before the object escapes. - */ - HeapPtr newScript; - /* * Estimate of the contribution of this object to the type sets it appears in. * This is the sum of the sizes of those sets at the point when the object @@ -758,6 +755,13 @@ struct TypeObject : gc::Cell uint32 contribution; static const uint32 CONTRIBUTION_LIMIT = 2000; + /* + * If non-NULL, objects of this type have always been constructed using + * 'new' on the specified script, which adds some number of properties to + * the object in a definite order before the object escapes. + */ + HeapPtr newScript; + /* * Properties of this object. This may contain JSID_VOID, representing the * types of all integer indexes of the object, and/or JSID_EMPTY, holding @@ -793,6 +797,10 @@ struct TypeObject : gc::Cell /* If this is an interpreted function, the function object. 
*/ HeapPtrFunction interpretedFunction; +#if JS_BITS_PER_WORD == 32 + void *padding; +#endif + inline TypeObject(JSObject *proto, bool isFunction, bool unknown); bool isFunction() { return !!(flags & OBJECT_FLAG_FUNCTION); } @@ -812,16 +820,6 @@ struct TypeObject : gc::Cell return !!(flags & OBJECT_FLAG_UNKNOWN_PROPERTIES); } - /* - * Return an immutable, shareable, empty shape with the same clasp as this - * and the same slotSpan as this had when empty. - * - * If |this| is the scope of an object |proto|, the resulting scope can be - * used as the scope of a new object whose prototype is |proto|. - */ - inline bool canProvideEmptyShape(js::Class *clasp); - inline js::EmptyShape *getEmptyShape(JSContext *cx, js::Class *aclasp, gc::AllocKind kind); - /* * Get or create a property of this object. Only call this for properties which * a script accesses explicitly. 'assign' indicates whether this is for an @@ -874,10 +872,11 @@ struct TypeObject : gc::Cell * object pending deletion is released when weak references are sweeped * from all the compartment's type objects. */ - void finalize(JSContext *cx) {} + void finalize(JSContext *cx, bool background) {} static inline void writeBarrierPre(TypeObject *type); static inline void writeBarrierPost(TypeObject *type, void *addr); + static inline void readBarrier(TypeObject *type); private: inline uint32 basePropertyCount() const; @@ -888,8 +887,18 @@ struct TypeObject : gc::Cell } }; -/* Global singleton for the generic type of objects with no prototype. */ -extern TypeObject emptyTypeObject; +/* + * Entries for the per-compartment set of type objects which are the default + * 'new' or the lazy types of some prototype. + */ +struct TypeObjectEntry +{ + typedef JSObject *Lookup; + + static inline HashNumber hash(JSObject *base); + static inline bool match(TypeObject *key, JSObject *lookup); +}; +typedef HashSet TypeObjectSet; /* * Call to mark a script's arguments as having been created, recompile any @@ -1020,9 +1029,6 @@ class TypeScript /* Analysis information for the script, cleared on each GC. */ analyze::ScriptAnalysis *analysis; - /* Function for the script, if it has one. */ - HeapPtrFunction function; - /* * Information about the scope in which a script executes. This information * is not set until the script has executed at least once and SetScope @@ -1041,8 +1047,7 @@ class TypeScript /* Dynamic types generated at points within this script. 
*/ TypeResult *dynamicList; - inline TypeScript(JSFunction *fun); - inline ~TypeScript(); + inline TypeScript(); bool hasScope() { return size_t(global.get()) != GLOBAL_MISSING_SCOPE; } @@ -1270,4 +1275,8 @@ void TypeFailure(JSContext *cx, const char *fmt, ...); } /* namespace types */ } /* namespace js */ +namespace JS { + template<> class AnchorPermitted { }; +} + #endif // jsinfer_h___ diff --git a/js/src/jsinferinlines.h b/js/src/jsinferinlines.h index dbec2891ab1f..9a2fbab6402a 100644 --- a/js/src/jsinferinlines.h +++ b/js/src/jsinferinlines.h @@ -321,10 +321,10 @@ TypeMonitorCall(JSContext *cx, const js::CallArgs &args, bool constructing) JSObject *callee = &args.callee(); if (callee->isFunction()) { - JSFunction *fun = callee->getFunctionPrivate(); + JSFunction *fun = callee->toFunction(); if (fun->isInterpreted()) { JSScript *script = fun->script(); - if (!script->ensureRanAnalysis(cx, fun, callee->getParent())) + if (!script->ensureRanAnalysis(cx, fun->environment())) return; if (cx->typeInferenceEnabled()) TypeMonitorCallSlow(cx, callee, args, constructing); @@ -460,15 +460,9 @@ UseNewTypeAtEntry(JSContext *cx, StackFrame *fp) ///////////////////////////////////////////////////////////////////// inline -TypeScript::TypeScript(JSFunction *fun) - : function(fun), - global((js::GlobalObject *) GLOBAL_MISSING_SCOPE) -{ -} - -inline -TypeScript::~TypeScript() +TypeScript::TypeScript() { + this->global = (js::GlobalObject *) GLOBAL_MISSING_SCOPE; } /* static */ inline unsigned @@ -700,8 +694,6 @@ TypeScript::SetArgument(JSContext *cx, JSScript *script, unsigned arg, const js: void TypeScript::trace(JSTracer *trc) { - if (function) - gc::MarkObject(trc, function, "script_fun"); if (hasScope() && global) gc::MarkObject(trc, global, "script_global"); @@ -1268,7 +1260,7 @@ inline void TypeObject::writeBarrierPre(TypeObject *type) { #ifdef JSGC_INCREMENTAL - if (!type || type == &js::types::emptyTypeObject) + if (!type) return; JSCompartment *comp = type->compartment(); @@ -1282,6 +1274,17 @@ TypeObject::writeBarrierPost(TypeObject *type, void *addr) { } +inline void +TypeObject::readBarrier(TypeObject *type) +{ +#ifdef JSGC_INCREMENTAL + JSCompartment *comp = type->compartment(); + JS_ASSERT(comp->needsBarrier()); + + MarkTypeObjectUnbarriered(comp->barrierTracer(), type, "read barrier"); +#endif +} + inline void TypeNewScript::writeBarrierPre(TypeNewScript *newScript) { @@ -1317,15 +1320,15 @@ Property::Property(const Property &o) } } /* namespace js::types */ inline bool -JSScript::ensureHasTypes(JSContext *cx, JSFunction *fun) +JSScript::ensureHasTypes(JSContext *cx) { - return types || makeTypes(cx, fun); + return types || makeTypes(cx); } inline bool -JSScript::ensureRanAnalysis(JSContext *cx, JSFunction *fun, JSObject *scope) +JSScript::ensureRanAnalysis(JSContext *cx, JSObject *scope) { - if (!ensureHasTypes(cx, fun)) + if (!ensureHasTypes(cx)) return false; if (!types->hasScope() && !js::types::TypeScript::SetScope(cx, this, scope)) return false; @@ -1338,7 +1341,7 @@ JSScript::ensureRanAnalysis(JSContext *cx, JSFunction *fun, JSObject *scope) inline bool JSScript::ensureRanInference(JSContext *cx) { - if (!ensureRanAnalysis(cx)) + if (!ensureRanAnalysis(cx, NULL)) return false; if (!analysis()->ranInference()) { js::types::AutoEnterTypeInference enter(cx); @@ -1376,4 +1379,12 @@ js::analyze::ScriptAnalysis::addPushedType(JSContext *cx, uint32 offset, uint32 pushed->addType(cx, type); } +inline js::types::TypeObject * +JSCompartment::getEmptyType(JSContext *cx) +{ + if 
(!emptyTypeObject) + emptyTypeObject = types.newTypeObject(cx, NULL, JSProto_Object, NULL, true); + return emptyTypeObject; +} + #endif // jsinferinlines_h___ diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index 6d0496700946..5048617d92c7 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -190,7 +190,7 @@ js::GetBlockChain(JSContext *cx, StackFrame *fp) else if (op == JSOP_ENTERBLOCK) blockChain = script->getObject(indexBase + GET_INDEX(pc)); else if (op == JSOP_LEAVEBLOCK || op == JSOP_LEAVEBLOCKEXPR) - blockChain = blockChain->getParent(); + blockChain = blockChain->getStaticBlockScopeChain(); else if (op == JSOP_BLOCKCHAIN) blockChain = script->getObject(indexBase + GET_INDEX(pc)); else if (op == JSOP_NULLBLOCKCHAIN) @@ -295,7 +295,7 @@ GetScopeChainFull(JSContext *cx, StackFrame *fp, JSObject *blockChain) */ limitClone = &fp->scopeChain(); while (limitClone->isWith()) - limitClone = limitClone->getParent(); + limitClone = limitClone->internalScopeChain(); JS_ASSERT(limitClone); /* @@ -342,7 +342,7 @@ GetScopeChainFull(JSContext *cx, StackFrame *fp, JSObject *blockChain) JSObject *newChild = innermostNewChild; for (;;) { JS_ASSERT(newChild->getProto() == sharedBlock); - sharedBlock = sharedBlock->getParent(); + sharedBlock = sharedBlock->getStaticBlockScopeChain(); /* Sometimes limitBlock will be NULL, so check that first. */ if (sharedBlock == limitBlock || !sharedBlock) @@ -353,10 +353,12 @@ GetScopeChainFull(JSContext *cx, StackFrame *fp, JSObject *blockChain) if (!clone) return NULL; - newChild->setParent(clone); + if (!newChild->setInternalScopeChain(cx, clone)) + return NULL; newChild = clone; } - newChild->setParent(&fp->scopeChain()); + if (!newChild->setInternalScopeChain(cx, &fp->scopeChain())) + return NULL; /* @@ -422,7 +424,7 @@ js::BoxNonStrictThis(JSContext *cx, const CallReceiver &call) JS_ASSERT(!thisv.isMagic()); #ifdef DEBUG - JSFunction *fun = call.callee().isFunction() ? call.callee().getFunctionPrivate() : NULL; + JSFunction *fun = call.callee().isFunction() ? call.callee().toFunction() : NULL; JS_ASSERT_IF(fun && fun->isInterpreted(), !fun->inStrictMode()); #endif @@ -509,12 +511,11 @@ js::OnUnknownMethod(JSContext *cx, Value *vp) vp[0] = IdToValue(id); } #endif - obj = js_NewGCObject(cx, FINALIZE_OBJECT2); + + obj = NewObjectWithClassProto(cx, &js_NoSuchMethodClass, NULL, NULL); if (!obj) return false; - obj->init(cx, &js_NoSuchMethodClass, &emptyTypeObject, NULL, NULL, false); - obj->setSharedNonNativeMap(); obj->setSlot(JSSLOT_FOUND_FUNCTION, tvr.value()); obj->setSlot(JSSLOT_SAVED_ID, vp[0]); vp[0].setObject(*obj); @@ -619,7 +620,7 @@ js::InvokeKernel(JSContext *cx, CallArgs args, MaybeConstruct construct) } /* Invoke native functions. */ - JSFunction *fun = callee.getFunctionPrivate(); + JSFunction *fun = callee.toFunction(); JS_ASSERT_IF(construct, !fun->isConstructor()); if (fun->isNative()) return CallJSNative(cx, fun->u.n.native, args); @@ -766,7 +767,7 @@ js::ExecuteKernel(JSContext *cx, JSScript *script, JSObject &scopeChain, const V Probes::startExecution(cx, script); - if (!script->ensureRanAnalysis(cx, NULL, &scopeChain)) + if (!script->ensureRanAnalysis(cx, &scopeChain)) return false; TypeScript::SetThis(cx, script, fp->thisValue()); @@ -801,8 +802,10 @@ js::Execute(JSContext *cx, JSScript *script, JSObject &scopeChainArg, Value *rva JS_ASSERT(!scopeChain->getOps()->defineProperty); /* The VAROBJFIX option makes varObj == globalObj in global code. 
*/ - if (!cx->hasRunOption(JSOPTION_VAROBJFIX)) - scopeChain->makeVarObj(); + if (!cx->hasRunOption(JSOPTION_VAROBJFIX)) { + if (!scopeChain->setVarObj(cx)) + return false; + } /* Use the scope chain as 'this', modulo outerization. */ JSObject *thisObj = scopeChain->thisObject(cx); @@ -922,7 +925,6 @@ js::LooselyEqual(JSContext *cx, const Value &lval, const Value &rval, JSBool *re if (lval.isObject()) { JSObject *l = &lval.toObject(); JSObject *r = &rval.toObject(); - l->assertSpecialEqualitySynced(); if (JSEqualityOp eq = l->getClass()->ext.equality) { return eq(cx, l, &rval, result); @@ -1073,7 +1075,7 @@ js::InvokeConstructorKernel(JSContext *cx, const CallArgs &argsRef) JSObject *callee = &args.callee(); Class *clasp = callee->getClass(); if (clasp == &FunctionClass) { - JSFunction *fun = callee->getFunctionPrivate(); + JSFunction *fun = callee->toFunction(); if (fun->isConstructor()) { Probes::calloutBegin(cx, fun); @@ -1117,7 +1119,7 @@ js::InvokeConstructorWithGivenThis(JSContext *cx, JSObject *thisobj, const Value Class *clasp = callee.getClass(); JSFunction *fun; bool ok; - if (clasp == &FunctionClass && (fun = callee.getFunctionPrivate())->isConstructor()) { + if (clasp == &FunctionClass && (fun = callee.toFunction())->isConstructor()) { args.thisv().setMagicWithObjectOrNullPayload(thisobj); Probes::calloutBegin(cx, fun); ok = CallJSNativeConstructor(cx, fun->u.n.native, args); @@ -1205,7 +1207,7 @@ LeaveWith(JSContext *cx) JS_ASSERT(withobj->getPrivate() == js_FloatingFrameIfGenerator(cx, cx->fp())); JS_ASSERT(OBJ_BLOCK_DEPTH(cx, withobj) >= 0); withobj->setPrivate(NULL); - cx->fp()->setScopeChainNoCallObj(*withobj->getParent()); + cx->fp()->setScopeChainNoCallObj(*withobj->internalScopeChain()); } bool @@ -1411,9 +1413,9 @@ inline InterpreterFrames::~InterpreterFrames() */ #if defined DEBUG && !defined JS_THREADSAFE -# define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) \ +# define ASSERT_VALID_PROPERTY_CACHE_HIT(obj,pobj,entry) \ JS_BEGIN_MACRO \ - if (!AssertValidPropertyCacheHit(cx, script, regs, pcoff, obj, pobj, \ + if (!AssertValidPropertyCacheHit(cx, script, regs, obj, pobj, \ entry)) { \ goto error; \ } \ @@ -1421,17 +1423,14 @@ inline InterpreterFrames::~InterpreterFrames() static bool AssertValidPropertyCacheHit(JSContext *cx, JSScript *script, FrameRegs& regs, - ptrdiff_t pcoff, JSObject *start, JSObject *found, + JSObject *start, JSObject *found, PropertyCacheEntry *entry) { uint32 sample = cx->runtime->gcNumber; PropertyCacheEntry savedEntry = *entry; JSAtom *atom; - if (pcoff >= 0) - GET_ATOM_FROM_BYTECODE(script, regs.pc, pcoff, atom); - else - atom = cx->runtime->atomState.lengthAtom; + GET_ATOM_FROM_BYTECODE(script, regs.pc, 0, atom); JSObject *obj, *pobj; JSProperty *prop; @@ -1452,34 +1451,13 @@ AssertValidPropertyCacheHit(JSContext *cx, JSScript *script, FrameRegs& regs, JS_ASSERT(pobj == found); const Shape *shape = (Shape *) prop; - if (entry->vword.isSlot()) { - JS_ASSERT(entry->vword.toSlot() == shape->slot); - JS_ASSERT(!shape->isMethod()); - } else if (entry->vword.isShape()) { - JS_ASSERT(entry->vword.toShape() == shape); - JS_ASSERT_IF(shape->isMethod(), - shape->methodObject() == pobj->nativeGetSlot(shape->slot).toObject()); - } else { - Value v; - JS_ASSERT(entry->vword.isFunObj()); - JS_ASSERT(!entry->vword.isNull()); - JS_ASSERT(pobj->brandedOrHasMethodBarrier()); - JS_ASSERT(shape->hasDefaultGetterOrIsMethod()); - JS_ASSERT(pobj->containsSlot(shape->slot)); - v = pobj->nativeGetSlot(shape->slot); - JS_ASSERT(entry->vword.toFunObj() == 
v.toObject()); - - if (shape->isMethod()) { - JS_ASSERT(js_CodeSpec[*regs.pc].format & JOF_CALLOP); - JS_ASSERT(shape->methodObject() == v.toObject()); - } - } + JS_ASSERT(entry->prop == shape); return true; } #else -# define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) ((void) 0) +# define ASSERT_VALID_PROPERTY_CACHE_HIT(obj,pobj,entry) ((void) 0) #endif /* @@ -1995,7 +1973,7 @@ BEGIN_CASE(JSOP_POPN) JS_ASSERT_IF(obj, OBJ_BLOCK_DEPTH(cx, obj) + OBJ_BLOCK_COUNT(cx, obj) <= (size_t) (regs.sp - regs.fp()->base())); - for (obj = ®s.fp()->scopeChain(); obj; obj = obj->getParent()) { + for (obj = ®s.fp()->scopeChain(); obj; obj = obj->scopeChain()) { if (!obj->isBlock() || !obj->isWith()) continue; if (obj->getPrivate() != js_FloatingFrameIfGenerator(cx, regs.fp())) @@ -2353,10 +2331,10 @@ END_CASE(JSOP_PICK) JS_BEGIN_MACRO \ if (shape->isDataDescriptor() && shape->hasDefaultGetter()) { \ /* Fast path for Object instance properties. */ \ - JS_ASSERT((shape)->slot != SHAPE_INVALID_SLOT || \ + JS_ASSERT((shape)->slot() != SHAPE_INVALID_SLOT || \ !shape->hasDefaultSetter()); \ - if (((shape)->slot != SHAPE_INVALID_SLOT)) \ - *(vp) = (pobj)->nativeGetSlot((shape)->slot); \ + if (((shape)->slot() != SHAPE_INVALID_SLOT)) \ + *(vp) = (pobj)->nativeGetSlot((shape)->slot()); \ else \ (vp)->setUndefined(); \ } else { \ @@ -2368,8 +2346,8 @@ END_CASE(JSOP_PICK) #define NATIVE_SET(cx,obj,shape,entry,strict,vp) \ JS_BEGIN_MACRO \ if (shape->hasDefaultSetter() && \ - (shape)->slot != SHAPE_INVALID_SLOT && \ - !(obj)->brandedOrHasMethodBarrier()) { \ + (shape)->hasSlot() && \ + !(shape)->isMethod()) { \ /* Fast path for, e.g., plain Object instance properties. */ \ (obj)->nativeSetSlotWithType(cx, shape, *vp); \ } else { \ @@ -2435,7 +2413,7 @@ BEGIN_CASE(JSOP_BINDNAME) * forms. */ obj = ®s.fp()->scopeChain(); - if (!obj->getParent()) + if (obj->isGlobal()) break; PropertyCacheEntry *entry; @@ -2443,7 +2421,7 @@ BEGIN_CASE(JSOP_BINDNAME) JSAtom *atom; JS_PROPERTY_CACHE(cx).test(cx, regs.pc, obj, obj2, entry, atom); if (!atom) { - ASSERT_VALID_PROPERTY_CACHE_HIT(0, obj, obj2, entry); + ASSERT_VALID_PROPERTY_CACHE_HIT(obj, obj2, entry); break; } @@ -2933,57 +2911,17 @@ BEGIN_CASE(JSOP_VOID) regs.sp[-1].setUndefined(); END_CASE(JSOP_VOID) -{ - /* - * Property incops are followed by an equivalent decomposed version, - * and we have the option of running either. If type inference is enabled - * we run the decomposed version to accumulate observed types and - * overflows which inference can process, otherwise we run the fat opcode - * as doing so is faster and is what the tracer needs while recording. - */ - JSObject *obj; - JSAtom *atom; - jsid id; - jsint i; - BEGIN_CASE(JSOP_INCELEM) BEGIN_CASE(JSOP_DECELEM) BEGIN_CASE(JSOP_ELEMINC) BEGIN_CASE(JSOP_ELEMDEC) - - if (cx->typeInferenceEnabled()) { - len = JSOP_INCELEM_LENGTH; - DO_NEXT_OP(len); - } - - /* - * Delay fetching of id until we have the object to ensure the proper - * evaluation order. See bug 372331. 
- */ - id = JSID_VOID; - i = -2; - goto fetch_incop_obj; + /* No-op */ +END_CASE(JSOP_INCELEM) BEGIN_CASE(JSOP_INCPROP) BEGIN_CASE(JSOP_DECPROP) BEGIN_CASE(JSOP_PROPINC) BEGIN_CASE(JSOP_PROPDEC) - - if (cx->typeInferenceEnabled()) { - len = JSOP_INCPROP_LENGTH; - DO_NEXT_OP(len); - } - - LOAD_ATOM(0, atom); - id = ATOM_TO_JSID(atom); - i = -1; - - fetch_incop_obj: - FETCH_OBJECT(cx, i, obj); - if (JSID_IS_VOID(id)) - FETCH_ELEMENT_ID(obj, -1, id); - goto do_incop; - BEGIN_CASE(JSOP_INCNAME) BEGIN_CASE(JSOP_DECNAME) BEGIN_CASE(JSOP_NAMEINC) @@ -2992,114 +2930,8 @@ BEGIN_CASE(JSOP_INCGNAME) BEGIN_CASE(JSOP_DECGNAME) BEGIN_CASE(JSOP_GNAMEINC) BEGIN_CASE(JSOP_GNAMEDEC) -{ - if (cx->typeInferenceEnabled()) { - len = JSOP_INCNAME_LENGTH; - DO_NEXT_OP(len); - } - - obj = ®s.fp()->scopeChain(); - - bool global = (js_CodeSpec[op].format & JOF_GNAME); - if (global) - obj = obj->getGlobal(); - - JSObject *obj2; - PropertyCacheEntry *entry; - JS_PROPERTY_CACHE(cx).test(cx, regs.pc, obj, obj2, entry, atom); - if (!atom) { - ASSERT_VALID_PROPERTY_CACHE_HIT(0, obj, obj2, entry); - if (obj == obj2 && entry->vword.isSlot()) { - uint32 slot = entry->vword.toSlot(); - const Value &rref = obj->nativeGetSlot(slot); - int32_t tmp; - if (JS_LIKELY(rref.isInt32() && CanIncDecWithoutOverflow(tmp = rref.toInt32()))) { - int32_t inc = tmp + ((js_CodeSpec[op].format & JOF_INC) ? 1 : -1); - if (!(js_CodeSpec[op].format & JOF_POST)) - tmp = inc; - obj->nativeSetSlot(slot, Int32Value(inc)); - PUSH_INT32(tmp); - len = JSOP_INCNAME_LENGTH + GetDecomposeLength(regs.pc, JSOP_INCNAME_LENGTH); - DO_NEXT_OP(len); - } - } - LOAD_ATOM(0, atom); - } - - id = ATOM_TO_JSID(atom); - JSProperty *prop; - if (!js_FindPropertyHelper(cx, id, true, global, &obj, &obj2, &prop)) - goto error; - if (!prop) { - atomNotDefined = atom; - goto atom_not_defined; - } -} - -do_incop: -{ - /* - * We need a root to store the value to leave on the stack until - * we have done with obj->setProperty. - */ - PUSH_NULL(); - if (!obj->getGeneric(cx, id, ®s.sp[-1])) - goto error; - - const JSCodeSpec *cs = &js_CodeSpec[op]; - JS_ASSERT(cs->ndefs == 1); - JS_ASSERT((cs->format & JOF_TMPSLOT_MASK) >= JOF_TMPSLOT2); - - uint32 format = cs->format; - uint32 setPropFlags = (JOF_MODE(format) == JOF_NAME) - ? JSRESOLVE_ASSIGNING - : JSRESOLVE_ASSIGNING | JSRESOLVE_QUALIFIED; - - Value &ref = regs.sp[-1]; - int32_t tmp; - if (JS_LIKELY(ref.isInt32() && CanIncDecWithoutOverflow(tmp = ref.toInt32()))) { - int incr = (format & JOF_INC) ? 1 : -1; - if (format & JOF_POST) - ref.getInt32Ref() = tmp + incr; - else - ref.getInt32Ref() = tmp += incr; - - { - JSAutoResolveFlags rf(cx, setPropFlags); - if (!obj->setGeneric(cx, id, &ref, script->strictModeCode)) - goto error; - } - - /* - * We must set regs.sp[-1] to tmp for both post and pre increments - * as the setter overwrites regs.sp[-1]. - */ - ref.setInt32(tmp); - } else { - /* We need an extra root for the result. */ - PUSH_NULL(); - if (!DoIncDec(cx, cs, ®s.sp[-2], ®s.sp[-1])) - goto error; - - { - JSAutoResolveFlags rf(cx, setPropFlags); - if (!obj->setGeneric(cx, id, ®s.sp[-1], script->strictModeCode)) - goto error; - } - - regs.sp--; - } - - if (cs->nuses == 0) { - /* regs.sp[-1] already contains the result of name increment. 
*/ - } else { - regs.sp[-1 - cs->nuses] = regs.sp[-1]; - regs.sp -= cs->nuses; - } - len = cs->length + GetDecomposeLength(regs.pc, cs->length); - DO_NEXT_OP(len); -} -} + /* No-op */ +END_CASE(JSOP_INCPROP) { int incr, incr2; @@ -3164,19 +2996,6 @@ BEGIN_CASE(JSOP_THIS) PUSH_COPY(regs.fp()->thisValue()); END_CASE(JSOP_THIS) -BEGIN_CASE(JSOP_UNBRANDTHIS) -{ - if (!ComputeThis(cx, regs.fp())) - goto error; - Value &thisv = regs.fp()->thisValue(); - if (thisv.isObject()) { - JSObject *obj = &thisv.toObject(); - if (obj->isNative()) - obj->unbrand(cx); - } -} -END_CASE(JSOP_UNBRANDTHIS) - BEGIN_CASE(JSOP_GETPROP) BEGIN_CASE(JSOP_GETXPROP) BEGIN_CASE(JSOP_LENGTH) @@ -3235,17 +3054,8 @@ BEGIN_CASE(JSOP_LENGTH) JSAtom *atom; JS_PROPERTY_CACHE(cx).test(cx, regs.pc, aobj, obj2, entry, atom); if (!atom) { - ASSERT_VALID_PROPERTY_CACHE_HIT(0, aobj, obj2, entry); - if (entry->vword.isFunObj()) { - rval.setObject(entry->vword.toFunObj()); - } else if (entry->vword.isSlot()) { - uint32 slot = entry->vword.toSlot(); - rval = obj2->nativeGetSlot(slot); - } else { - JS_ASSERT(entry->vword.isShape()); - const Shape *shape = entry->vword.toShape(); - NATIVE_GET(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, &rval); - } + ASSERT_VALID_PROPERTY_CACHE_HIT(aobj, obj2, entry); + NATIVE_GET(cx, obj, obj2, entry->prop, JSGET_METHOD_BARRIER, &rval); break; } @@ -3303,17 +3113,8 @@ BEGIN_CASE(JSOP_CALLPROP) JSAtom *atom; JS_PROPERTY_CACHE(cx).test(cx, regs.pc, aobj, obj2, entry, atom); if (!atom) { - ASSERT_VALID_PROPERTY_CACHE_HIT(0, aobj, obj2, entry); - if (entry->vword.isFunObj()) { - rval.setObject(entry->vword.toFunObj()); - } else if (entry->vword.isSlot()) { - uint32 slot = entry->vword.toSlot(); - rval = obj2->nativeGetSlot(slot); - } else { - JS_ASSERT(entry->vword.isShape()); - const Shape *shape = entry->vword.toShape(); - NATIVE_GET(cx, &objv.toObject(), obj2, shape, JSGET_NO_METHOD_BARRIER, &rval); - } + ASSERT_VALID_PROPERTY_CACHE_HIT(aobj, obj2, entry); + NATIVE_GET(cx, &objv.toObject(), obj2, entry->prop, JSGET_NO_METHOD_BARRIER, &rval); regs.sp[-1] = rval; assertSameCompartment(cx, regs.sp[-1]); PUSH_COPY(lval); @@ -3361,11 +3162,6 @@ BEGIN_CASE(JSOP_CALLPROP) } END_CASE(JSOP_CALLPROP) -BEGIN_CASE(JSOP_UNBRAND) - JS_ASSERT(regs.sp - regs.fp()->slots() >= 1); - regs.sp[-1].toObject().unbrand(cx); -END_CASE(JSOP_UNBRAND) - BEGIN_CASE(JSOP_SETGNAME) BEGIN_CASE(JSOP_SETNAME) BEGIN_CASE(JSOP_SETPROP) @@ -3411,89 +3207,30 @@ BEGIN_CASE(JSOP_SETMETHOD) * know that the entry applies to regs.pc and that obj's shape * matches. * - * The entry predicts either a new property to be added directly to - * obj by this set, or on an existing "own" property, or on a - * prototype property that has a setter. + * The entry predicts a set either an existing "own" property, or + * on a prototype property that has a setter. */ - const Shape *shape = entry->vword.toShape(); + const Shape *shape = entry->prop; JS_ASSERT_IF(shape->isDataDescriptor(), shape->writable()); - JS_ASSERT_IF(shape->hasSlot(), entry->vcapTag() == 0); + JS_ASSERT_IF(shape->hasSlot(), !entry->vindex); - /* - * Fastest path: check whether obj already has the cached shape and - * call NATIVE_SET and break to get out of the do-while(0). But we - * can call NATIVE_SET only for a direct or proto-setter hit. 
- */ - if (!entry->adding()) { - if (entry->vcapTag() == 0 || - ((obj2 = obj->getProto()) && obj2->shape() == entry->vshape())) - { + if (entry->vindex == 0 || + ((obj2 = obj->getProto()) && obj2->lastProperty() == entry->pshape)) { #ifdef DEBUG - if (entry->directHit()) { - JS_ASSERT(obj->nativeContains(cx, *shape)); - } else { - JS_ASSERT(obj2->nativeContains(cx, *shape)); - JS_ASSERT(entry->vcapTag() == 1); - JS_ASSERT(entry->kshape != entry->vshape()); - JS_ASSERT(!shape->hasSlot()); - } + if (entry->directHit()) { + JS_ASSERT(obj->nativeContains(cx, *shape)); + } else { + JS_ASSERT(obj2->nativeContains(cx, *shape)); + JS_ASSERT(entry->vindex == 1); + JS_ASSERT(entry->kshape != entry->pshape); + JS_ASSERT(!shape->hasSlot()); + } #endif - PCMETER(cache->pchits++); - PCMETER(cache->setpchits++); - NATIVE_SET(cx, obj, shape, entry, script->strictModeCode, &rval); - break; - } - } else { - JS_ASSERT(obj->isExtensible()); - - if (obj->nativeEmpty()) { - if (!obj->ensureClassReservedSlotsForEmptyObject(cx)) - goto error; - } - - uint32 slot; - if (shape->previous() == obj->lastProperty() && - entry->vshape() == rt->protoHazardShape && - shape->hasDefaultSetter() && - obj->getClass()->addProperty == JS_PropertyStub) { - slot = shape->slot; - JS_ASSERT(slot == obj->slotSpan()); - - /* - * Fast path: adding a plain old property that was once at - * the frontier of the property tree, whose slot is next to - * claim among the already-allocated slots in obj, where - * shape->table has not been created yet. - */ - PCMETER(cache->pchits++); - PCMETER(cache->addpchits++); - - if (slot < obj->numSlots()) { - JS_ASSERT(obj->getSlot(slot).isUndefined()); - } else { - if (!obj->allocSlot(cx, &slot)) - goto error; - JS_ASSERT(slot == shape->slot); - } - - /* Simply extend obj's property tree path with shape! */ - obj->extend(cx, shape); - - /* - * No method change check here because here we are adding a - * new property, not updating an existing slot's value that - * might contain a method of a branded shape. - */ - obj->nativeSetSlotWithType(cx, shape, rval); - - /* - * Purge the property cache of the id we may have just - * shadowed in obj's scope and proto chains. - */ - js_PurgeScopeChain(cx, obj, shape->propid); - break; - } + PCMETER(cache->pchits++); + PCMETER(cache->setpchits++); + NATIVE_SET(cx, obj, shape, entry, script->strictModeCode, &rval); + break; } PCMETER(cache->setpcmisses++); @@ -3713,11 +3450,10 @@ BEGIN_CASE(JSOP_FUNAPPLY) bool construct = (*regs.pc == JSOP_NEW); - JSObject *callee; JSFunction *fun; /* Don't bother trying to fast-path calls to scripted non-constructors. */ - if (!IsFunctionObject(args.calleev(), &callee, &fun) || !fun->isInterpretedConstructor()) { + if (!IsFunctionObject(args.calleev(), &fun) || !fun->isInterpretedConstructor()) { if (construct) { if (!InvokeConstructorKernel(cx, args)) goto error; @@ -3737,7 +3473,7 @@ BEGIN_CASE(JSOP_FUNAPPLY) InitialFrameFlags initial = construct ? 
INITIAL_CONSTRUCT : INITIAL_NONE; JSScript *newScript = fun->script(); - if (!cx->stack.pushInlineFrame(cx, regs, args, *callee, fun, newScript, initial)) + if (!cx->stack.pushInlineFrame(cx, regs, args, *fun, newScript, initial)) goto error; RESTORE_INTERP_VARS(); @@ -3804,7 +3540,6 @@ BEGIN_CASE(JSOP_CALLNAME) if (global) obj = obj->getGlobal(); - const Shape *shape; Value rval; PropertyCacheEntry *entry; @@ -3812,18 +3547,9 @@ BEGIN_CASE(JSOP_CALLNAME) JSAtom *atom; JS_PROPERTY_CACHE(cx).test(cx, regs.pc, obj, obj2, entry, atom); if (!atom) { - ASSERT_VALID_PROPERTY_CACHE_HIT(0, obj, obj2, entry); - if (entry->vword.isFunObj()) { - PUSH_OBJECT(entry->vword.toFunObj()); - } else if (entry->vword.isSlot()) { - uintN slot = entry->vword.toSlot(); - PUSH_COPY(obj2->nativeGetSlot(slot)); - } else { - JS_ASSERT(entry->vword.isShape()); - shape = entry->vword.toShape(); - NATIVE_GET(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, &rval); - PUSH_COPY(rval); - } + ASSERT_VALID_PROPERTY_CACHE_HIT(obj, obj2, entry); + NATIVE_GET(cx, obj, obj2, entry->prop, JSGET_METHOD_BARRIER, &rval); + PUSH_COPY(rval); TypeScript::Monitor(cx, script, regs.pc, regs.sp[-1]); @@ -3856,7 +3582,7 @@ BEGIN_CASE(JSOP_CALLNAME) if (!obj->getGeneric(cx, id, &rval)) goto error; } else { - shape = (Shape *)prop; + Shape *shape = (Shape *)prop; JSObject *normalized = obj; if (normalized->getClass() == &WithClass && !shape->hasDefaultGetter()) normalized = js_UnwrapWithObject(cx, normalized); @@ -4232,8 +3958,7 @@ BEGIN_CASE(JSOP_CALLFCSLOT) uintN index = GET_UINT16(regs.pc); JSObject *obj = &argv[-2].toObject(); - JS_ASSERT(index < obj->getFunctionPrivate()->script()->bindings.countUpvars()); - PUSH_COPY(obj->getFlatClosureUpvar(index)); + PUSH_COPY(obj->toFunction()->getFlatClosureUpvar(index)); TypeScript::Monitor(cx, script, regs.pc, regs.sp[-1]); if (op == JSOP_CALLFCSLOT) PUSH_UNDEFINED(); @@ -4324,8 +4049,8 @@ BEGIN_CASE(JSOP_DEFFUN) * windows, and user-defined JS functions precompiled and then shared among * requests in server-side JS. 
*/ - if (obj->getParent() != obj2) { - obj = CloneFunctionObject(cx, fun, obj2, true); + if (obj->toFunction()->environment() != obj2) { + obj = CloneFunctionObjectIfNotSingleton(cx, fun, obj2); if (!obj) goto error; JS_ASSERT_IF(script->hasGlobal(), obj->getProto() == fun->getProto()); @@ -4451,7 +4176,7 @@ BEGIN_CASE(JSOP_DEFLOCALFUN) JSObject *obj = fun; if (fun->isNullClosure()) { - obj = CloneFunctionObject(cx, fun, ®s.fp()->scopeChain(), true); + obj = CloneFunctionObjectIfNotSingleton(cx, fun, ®s.fp()->scopeChain()); if (!obj) goto error; } else { @@ -4460,8 +4185,8 @@ BEGIN_CASE(JSOP_DEFLOCALFUN) if (!parent) goto error; - if (obj->getParent() != parent) { - obj = CloneFunctionObject(cx, fun, parent, true); + if (obj->toFunction()->environment() != parent) { + obj = CloneFunctionObjectIfNotSingleton(cx, fun, parent); if (!obj) goto error; } @@ -4517,8 +4242,7 @@ BEGIN_CASE(JSOP_LAMBDA) JSObject *obj2 = &lref.toObject(); JS_ASSERT(obj2->isObject()); #endif - - fun->setMethodAtom(script->getAtom(GET_FULL_INDEX(pc2 - regs.pc))); + JS_ASSERT(fun->methodAtom() == script->getAtom(GET_FULL_INDEX(pc2 - regs.pc))); break; } @@ -4529,7 +4253,7 @@ BEGIN_CASE(JSOP_LAMBDA) #endif const Value &lref = regs.sp[-1]; if (lref.isObject() && lref.toObject().canHaveMethodBarrier()) { - fun->setMethodAtom(script->getAtom(GET_FULL_INDEX(pc2 - regs.pc))); + JS_ASSERT(fun->methodAtom() == script->getAtom(GET_FULL_INDEX(pc2 - regs.pc))); break; } } else if (op2 == JSOP_CALL) { @@ -4548,11 +4272,10 @@ BEGIN_CASE(JSOP_LAMBDA) * is the callee for this JSOP_CALL. */ const Value &cref = regs.sp[1 - (iargc + 2)]; - JSObject *callee; + JSFunction *fun; - if (IsFunctionObject(cref, &callee)) { - JSFunction *calleeFun = callee->getFunctionPrivate(); - if (Native native = calleeFun->maybeNative()) { + if (IsFunctionObject(cref, &fun)) { + if (Native native = fun->maybeNative()) { if ((iargc == 1 && native == array_sort) || (iargc == 2 && native == str_replace)) { break; @@ -4573,7 +4296,7 @@ BEGIN_CASE(JSOP_LAMBDA) goto error; } - obj = CloneFunctionObject(cx, fun, parent, true); + obj = CloneFunctionObjectIfNotSingleton(cx, fun, parent); if (!obj) goto error; } while (0); @@ -4734,7 +4457,7 @@ BEGIN_CASE(JSOP_NEWINIT) if (i == JSProto_Array) { obj = NewDenseEmptyArray(cx); } else { - gc::AllocKind kind = GuessObjectGCKind(0, false); + gc::AllocKind kind = GuessObjectGCKind(0); obj = NewBuiltinClassInstance(cx, &ObjectClass, kind); } @@ -4806,48 +4529,24 @@ BEGIN_CASE(JSOP_INITMETHOD) JS_ASSERT(obj->isObject()); /* - * Probe the property cache. - * - * On a hit, if the cached shape has a non-default setter, it must be - * __proto__. If shape->previous() != obj->lastProperty(), there must be a - * repeated property name. The fast path does not handle these two cases. + * Probe the property cache to see if this is a set on an existing property + * added by a NEWOBJECT or a previous INITPROP. If the cached shape has a + * non-default setter, it must be __proto__, so don't handle this. */ PropertyCacheEntry *entry; - const Shape *shape; - if (JS_PROPERTY_CACHE(cx).testForInit(rt, regs.pc, obj, &shape, &entry) && - shape->hasDefaultSetter() && - shape->previous() == obj->lastProperty()) - { + JSObject *obj2; + JSAtom *atom; + if (JS_PROPERTY_CACHE(cx).testForSet(cx, regs.pc, obj, &entry, &obj2, &atom) && + entry->prop->hasDefaultSetter() && + entry->vindex == 0) { + JS_ASSERT(obj == obj2); /* Fast path. Property cache hit. 
*/ - uint32 slot = shape->slot; - - JS_ASSERT(slot == obj->slotSpan()); - JS_ASSERT(slot >= JSSLOT_FREE(obj->getClass())); - if (slot < obj->numSlots()) { - JS_ASSERT(obj->getSlot(slot).isUndefined()); - } else { - if (!obj->allocSlot(cx, &slot)) - goto error; - JS_ASSERT(slot == shape->slot); - } - - /* A new object, or one we just extended in a recent initprop op. */ - JS_ASSERT(!obj->lastProperty() || - obj->shape() == obj->lastProperty()->shapeid); - obj->extend(cx, shape); - - /* - * No method change check here because here we are adding a new - * property, not updating an existing slot's value that might - * contain a method of a branded shape. - */ - obj->nativeSetSlotWithType(cx, shape, rval); + obj->nativeSetSlotWithType(cx, entry->prop, rval); } else { PCMETER(JS_PROPERTY_CACHE(cx).inipcmisses++); + LOAD_ATOM(0, atom); /* Get the immediate property name into id. */ - JSAtom *atom; - LOAD_ATOM(0, atom); jsid id = ATOM_TO_JSID(atom); uintN defineHow = (op == JSOP_INITMETHOD) @@ -5500,13 +5199,13 @@ BEGIN_CASE(JSOP_ENTERBLOCK) */ JSObject *obj2 = ®s.fp()->scopeChain(); while (obj2->isWith()) - obj2 = obj2->getParent(); + obj2 = obj2->internalScopeChain(); if (obj2->isBlock() && obj2->getPrivate() == js_FloatingFrameIfGenerator(cx, regs.fp())) { JSObject *youngestProto = obj2->getProto(); JS_ASSERT(youngestProto->isStaticBlock()); JSObject *parent = obj; - while ((parent = parent->getParent()) != youngestProto) + while ((parent = parent->scopeChain()) != youngestProto) JS_ASSERT(parent); } #endif diff --git a/js/src/jsinterp.h b/js/src/jsinterp.h index 0ba6462afa54..e6abcfc27517 100644 --- a/js/src/jsinterp.h +++ b/js/src/jsinterp.h @@ -348,6 +348,14 @@ Debug_SetValueRangeToCrashOnTouch(Value *vec, size_t len) #endif } +static JS_ALWAYS_INLINE void +Debug_SetValueRangeToCrashOnTouch(HeapValue *vec, size_t len) +{ +#ifdef DEBUG + Debug_SetValueRangeToCrashOnTouch((Value *) vec, len); +#endif +} + } /* namespace js */ #endif /* jsinterp_h___ */ diff --git a/js/src/jsiter.cpp b/js/src/jsiter.cpp index 47e74436cb4d..27d2c9ed3e2b 100644 --- a/js/src/jsiter.cpp +++ b/js/src/jsiter.cpp @@ -92,7 +92,6 @@ static JSObject *iterator_iterator(JSContext *cx, JSObject *obj, JSBool keysonly Class js::IteratorClass = { "Iterator", JSCLASS_HAS_PRIVATE | - JSCLASS_CONCURRENT_FINALIZER | JSCLASS_HAS_CACHED_PROTO(JSProto_Iterator), JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ @@ -118,6 +117,8 @@ Class js::IteratorClass = { } }; +static const gc::AllocKind ITERATOR_FINALIZE_KIND = gc::FINALIZE_OBJECT2; + void NativeIterator::mark(JSTracer *trc) { @@ -220,8 +221,8 @@ EnumerateNativeProperties(JSContext *cx, JSObject *obj, JSObject *pobj, uintN fl for (Shape::Range r = pobj->lastProperty()->all(); !r.empty(); r.popFront()) { const Shape &shape = r.front(); - if (!JSID_IS_DEFAULT_XML_NAMESPACE(shape.propid) && - !Enumerate(cx, obj, pobj, shape.propid, shape.enumerable(), flags, ht, props)) + if (!JSID_IS_DEFAULT_XML_NAMESPACE(shape.propid()) && + !Enumerate(cx, obj, pobj, shape.propid(), shape.enumerable(), flags, ht, props)) { return false; } @@ -412,23 +413,21 @@ static inline JSObject * NewIteratorObject(JSContext *cx, uintN flags) { if (flags & JSITER_ENUMERATE) { - /* - * Non-escaping native enumerator objects do not need map, proto, or - * parent. 
However, code in jstracer.cpp and elsewhere may find such a - * native enumerator object via the stack and (as for all objects that - * are not stillborn, with the exception of "NoSuchMethod" internal - * helper objects) expect it to have a non-null map pointer, so we - * share an empty Enumerator scope in the runtime. - */ - JSObject *obj = js_NewGCObject(cx, FINALIZE_OBJECT0); + types::TypeObject *type = cx->compartment->getEmptyType(cx); + if (!type) + return NULL; + + Shape *emptyEnumeratorShape = EmptyShape::getInitialShape(cx, &IteratorClass, NULL, NULL, + ITERATOR_FINALIZE_KIND); + if (!emptyEnumeratorShape) + return NULL; + + JSObject *obj = JSObject::create(cx, ITERATOR_FINALIZE_KIND, + emptyEnumeratorShape, type, NULL); if (!obj) return NULL; - EmptyShape *emptyEnumeratorShape = EmptyShape::getEmptyEnumeratorShape(cx); - if (!emptyEnumeratorShape) - return NULL; - obj->init(cx, &IteratorClass, &types::emptyTypeObject, NULL, NULL, false); - obj->setMap(emptyEnumeratorShape); + JS_ASSERT(obj->numFixedSlots() == JSObject::ITER_CLASS_NFIXED_SLOTS); return obj; } @@ -440,7 +439,7 @@ NativeIterator::allocateIterator(JSContext *cx, uint32 slength, const AutoIdVect { size_t plength = props.length(); NativeIterator *ni = (NativeIterator *) - cx->malloc_(sizeof(NativeIterator) + plength * sizeof(jsid) + slength * sizeof(uint32)); + cx->malloc_(sizeof(NativeIterator) + plength * sizeof(jsid) + slength * sizeof(Shape *)); if (!ni) return NULL; ni->props_array = ni->props_cursor = (HeapId *) (ni + 1); @@ -457,7 +456,7 @@ NativeIterator::init(JSObject *obj, uintN flags, uint32 slength, uint32 key) { this->obj.init(obj); this->flags = flags; - this->shapes_array = (uint32 *) this->props_end; + this->shapes_array = (const Shape **) this->props_end; this->shapes_length = slength; this->shapes_key = key; } @@ -482,7 +481,8 @@ VectorToKeyIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &key JS_ASSERT(!(flags & JSITER_FOREACH)); if (obj) { - obj->flags |= JSObject::ITERATED; + if (obj->hasSingletonType() && !obj->setIteratedSingleton(cx)) + return false; types::MarkTypeObjectFlags(cx, obj, types::OBJECT_FLAG_ITERATED); } @@ -506,7 +506,7 @@ VectorToKeyIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &key JSObject *pobj = obj; size_t ind = 0; do { - ni->shapes_array[ind++] = pobj->shape(); + ni->shapes_array[ind++] = pobj->lastProperty(); pobj = pobj->getProto(); } while (pobj); JS_ASSERT(ind == slength); @@ -534,7 +534,8 @@ VectorToValueIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &k JS_ASSERT(flags & JSITER_FOREACH); if (obj) { - obj->flags |= JSObject::ITERATED; + if (obj->hasSingletonType() && !obj->setIteratedSingleton(cx)) + return false; types::MarkTypeObjectFlags(cx, obj, types::OBJECT_FLAG_ITERATED); } @@ -574,7 +575,7 @@ UpdateNativeIterator(NativeIterator *ni, JSObject *obj) bool GetIterator(JSContext *cx, JSObject *obj, uintN flags, Value *vp) { - Vector shapes(cx); + Vector shapes(cx); uint32 key = 0; bool keysOnly = (flags == JSITER_ENUMERATE); @@ -603,9 +604,9 @@ GetIterator(JSContext *cx, JSObject *obj, uintN flags, Value *vp) NativeIterator *lastni = last->getNativeIterator(); if (!(lastni->flags & (JSITER_ACTIVE|JSITER_UNREUSABLE)) && obj->isNative() && - obj->shape() == lastni->shapes_array[0] && + obj->lastProperty() == lastni->shapes_array[0] && proto && proto->isNative() && - proto->shape() == lastni->shapes_array[1] && + proto->lastProperty() == lastni->shapes_array[1] && !proto->getProto()) { vp->setObject(*last); 
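GetIterator above reuses the most recently created native iterator only while every object on the prototype chain still reports the same lastProperty() it had when the iterator was built; this patch switches those guards from numeric shape ids to Shape pointers. A simplified sketch of the guard, with hypothetical Obj/CachedIterator types standing in for the real NativeIterator:

// Sketch only: a cached enumeration is reusable while the shape chain matches.
#include <string>
#include <vector>

struct Shape;

struct Obj {
    const Shape *lastProperty;
    const Obj   *proto;
    std::vector<std::string> ownKeys;   // stand-in for the real property list
};

struct CachedIterator {
    std::vector<const Shape *> shapes;  // one guard per object on the chain
    std::vector<std::string>   keys;    // snapshot of enumerable ids
};

CachedIterator snapshot(const Obj *obj) {
    CachedIterator it;
    for (const Obj *o = obj; o; o = o->proto) {
        it.shapes.push_back(o->lastProperty);
        it.keys.insert(it.keys.end(), o->ownKeys.begin(), o->ownKeys.end());
    }
    return it;
}

bool reusable(const CachedIterator &it, const Obj *obj) {
    size_t i = 0;
    for (const Obj *o = obj; o; o = o->proto, ++i) {
        if (i >= it.shapes.size() || o->lastProperty != it.shapes[i])
            return false;               // some layout changed: rebuild
    }
    return i == it.shapes.size();       // chain length must match too
}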
UpdateNativeIterator(lastni, obj); @@ -623,14 +624,15 @@ GetIterator(JSContext *cx, JSObject *obj, uintN flags, Value *vp) JSObject *pobj = obj; do { if (!pobj->isNative() || + pobj->hasUncacheableProto() || obj->getOps()->enumerate || pobj->getClass()->enumerate != JS_EnumerateStub) { shapes.clear(); goto miss; } - uint32 shape = pobj->shape(); - key = (key + (key << 16)) ^ shape; - if (!shapes.append(shape)) + const Shape *shape = pobj->lastProperty(); + key = (key + (key << 16)) ^ ((jsuword)shape >> 3); + if (!shapes.append((Shape *) shape)) return false; pobj = pobj->getProto(); } while (pobj); @@ -1210,7 +1212,7 @@ js_NewGenerator(JSContext *cx) JSObject *proto = global->getOrCreateGeneratorPrototype(cx); if (!proto) return NULL; - JSObject *obj = NewNonFunction(cx, &GeneratorClass, proto, global); + JSObject *obj = NewObjectWithGivenProto(cx, &GeneratorClass, proto, global); if (!obj) return NULL; diff --git a/js/src/jsiter.h b/js/src/jsiter.h index 5cf2bcdccc72..6cfe2d36dc1b 100644 --- a/js/src/jsiter.h +++ b/js/src/jsiter.h @@ -61,11 +61,11 @@ namespace js { struct NativeIterator { - HeapPtrObject obj; + HeapPtrObject obj; HeapId *props_array; HeapId *props_cursor; HeapId *props_end; - uint32 *shapes_array; + const Shape **shapes_array; uint32 shapes_length; uint32 shapes_key; uint32 flags; @@ -223,16 +223,6 @@ js_LiveFrameIfGenerator(js::StackFrame *fp) #endif -namespace js { - -static inline bool -IsStopIteration(const js::Value &v) -{ - return v.isObject() && v.toObject().isStopIteration(); -} - -} /* namespace js */ - extern JSObject * js_InitIteratorClasses(JSContext *cx, JSObject *obj); diff --git a/js/src/jsmath.cpp b/js/src/jsmath.cpp index e7d65c23ccc0..416cbc124520 100644 --- a/js/src/jsmath.cpp +++ b/js/src/jsmath.cpp @@ -703,7 +703,7 @@ js_IsMathFunction(Native native) JSObject * js_InitMathClass(JSContext *cx, JSObject *obj) { - JSObject *Math = NewNonFunction(cx, &MathClass, NULL, obj); + JSObject *Math = NewObjectWithClassProto(cx, &MathClass, NULL, obj); if (!Math || !Math->setSingletonType(cx)) return NULL; diff --git a/js/src/jsobj.cpp b/js/src/jsobj.cpp index 0f8c77247f64..b7f63a1c8a46 100644 --- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -116,8 +116,6 @@ using namespace js; using namespace js::gc; using namespace js::types; -JS_FRIEND_DATA(js::Shape) Shape::sharedNonNative(SHAPELESS); - Class js::ObjectClass = { js_Object_str, JSCLASS_HAS_CACHED_PROTO(JSProto_Object), @@ -957,7 +955,7 @@ static void AssertInnerizedScopeChain(JSContext *cx, JSObject &scopeobj) { #ifdef DEBUG - for (JSObject *o = &scopeobj; o; o = o->getParent()) { + for (JSObject *o = &scopeobj; o; o = o->scopeChain()) { if (JSObjectOp op = o->getClass()->ext.innerObject) JS_ASSERT(op(cx, o) == o); } @@ -1034,25 +1032,14 @@ EvalCacheLookup(JSContext *cx, JSLinearString *str, StackFrame *caller, uintN st if (src == str || EqualStrings(src, str)) { /* - * Source matches, qualify by comparing scopeobj to the - * COMPILE_N_GO-memoized parent of the first literal - * function or regexp object if any. If none, then this - * script has no compiled-in dependencies on the prior - * eval's scopeobj. + * Source matches. Make sure there are no inner objects + * which might use the wrong parent and/or call scope by + * reusing the previous eval's script. Skip the script's + * first object, which entrains the eval's scope. 
*/ - JSObjectArray *objarray = script->objects(); - int i = 1; - - if (objarray->length == 1) { - if (JSScript::isValidOffset(script->regexpsOffset)) { - objarray = script->regexps(); - i = 0; - } else { - i = -1; - } - } - if (i < 0 || - objarray->vector[i]->getParent() == &scopeobj) { + JS_ASSERT(script->objects()->length >= 1); + if (script->objects()->length == 1 && + !JSScript::isValidOffset(script->regexpsOffset)) { JS_ASSERT(staticLevel == script->staticLevel); *scriptp = script->evalHashLink(); script->evalHashLink() = NULL; @@ -1333,7 +1320,7 @@ DirectEval(JSContext *cx, const CallArgs &args) JS_ASSERT(IsBuiltinEvalForScope(&caller->scopeChain(), args.calleev())); JS_ASSERT(js_GetOpcode(cx, cx->fp()->script(), cx->regs().pc) == JSOP_EVAL); - AutoFunctionCallProbe callProbe(cx, args.callee().getFunctionPrivate(), caller->script()); + AutoFunctionCallProbe callProbe(cx, args.callee().toFunction(), caller->script()); JSObject *scopeChain = GetScopeChainFast(cx, caller, JSOP_EVAL, JSOP_EVAL_LENGTH + JSOP_LINENO_LENGTH); @@ -1358,8 +1345,8 @@ IsAnyBuiltinEval(JSFunction *fun) JSPrincipals * PrincipalsForCompiledCode(const CallReceiver &call, JSContext *cx) { - JS_ASSERT(IsAnyBuiltinEval(call.callee().getFunctionPrivate()) || - IsBuiltinFunctionConstructor(call.callee().getFunctionPrivate())); + JS_ASSERT(IsAnyBuiltinEval(call.callee().toFunction()) || + IsBuiltinFunctionConstructor(call.callee().toFunction())); /* * To compute the principals of the compiled eval/Function code, we simply @@ -2620,8 +2607,7 @@ obj_create(JSContext *cx, uintN argc, Value *vp) * Use the callee's global as the parent of the new object to avoid dynamic * scoping (i.e., using the caller's global). */ - JSObject *obj = NewNonFunction(cx, &ObjectClass, proto, - vp->toObject().getGlobal()); + JSObject *obj = NewObjectWithGivenProto(cx, &ObjectClass, proto, vp->toObject().getGlobal()); if (!obj) return JS_FALSE; vp->setObject(*obj); /* Root and prepare for eventual return. 
*/ @@ -2707,14 +2693,6 @@ obj_preventExtensions(JSContext *cx, uintN argc, Value *vp) return obj->preventExtensions(cx, &props); } -size_t -JSObject::sizeOfSlotsArray(JSMallocSizeOfFun mallocSizeOf) -{ - if (!hasSlotsArray()) - return 0; - return mallocSizeOf(slots, numDynamicSlots(numSlots()) * sizeof(js::Value)); -} - bool JSObject::sealOrFreeze(JSContext *cx, ImmutabilityType it) { @@ -2924,6 +2902,142 @@ js_Object(JSContext *cx, uintN argc, Value *vp) return JS_TRUE; } +static inline JSObject * +NewObject(JSContext *cx, Class *clasp, types::TypeObject *type, JSObject *parent, + gc::AllocKind kind) +{ + JS_ASSERT(clasp != &ArrayClass); + JS_ASSERT_IF(clasp == &FunctionClass, + kind == JSFunction::FinalizeKind || kind == JSFunction::ExtendedFinalizeKind); + + Shape *shape = EmptyShape::getInitialShape(cx, clasp, type->proto, parent, kind); + if (!shape) + return NULL; + + HeapValue *slots; + if (!PreallocateObjectDynamicSlots(cx, shape, &slots)) + return NULL; + + JSObject* obj = JSObject::create(cx, kind, shape, type, slots); + if (!obj) + return NULL; + + Probes::createObject(cx, obj); + return obj; +} + +JSObject * +js::NewObjectWithGivenProto(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent, + gc::AllocKind kind) +{ + if (CanBeFinalizedInBackground(kind, clasp)) + kind = GetBackgroundAllocKind(kind); + + NewObjectCache &cache = cx->compartment->newObjectCache; + + NewObjectCache::EntryIndex entry = -1; + if (proto && (!parent || parent == proto->getParent()) && !proto->isGlobal()) { + if (cache.lookupProto(clasp, proto, kind, &entry)) + return cache.newObjectFromHit(cx, entry); + } + + types::TypeObject *type = proto ? proto->getNewType(cx) : cx->compartment->getEmptyType(cx); + if (!type) + return NULL; + + /* + * Default parent to the parent of the prototype, which was set from + * the parent of the prototype's constructor. + */ + if (!parent && proto) + parent = proto->getParent(); + + JSObject *obj = NewObject(cx, clasp, type, parent, kind); + if (!obj) + return NULL; + + if (entry != -1 && !obj->hasDynamicSlots()) + cache.fillProto(entry, clasp, proto, kind, obj); + + return obj; +} + +JSObject * +js::NewObjectWithClassProto(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent, + gc::AllocKind kind) +{ + if (proto) + return NewObjectWithGivenProto(cx, clasp, proto, parent, kind); + + if (CanBeFinalizedInBackground(kind, clasp)) + kind = GetBackgroundAllocKind(kind); + + if (!parent) + parent = GetCurrentGlobal(cx); + + /* + * Use the object cache, except for classes without a cached proto key. + * On these objects, FindProto will do a dynamic property lookup to get + * global[className].prototype, where changes to either the className or + * prototype property would render the cached lookup incorrect. For classes + * with a proto key, the prototype created during class initialization is + * stored in an immutable slot on the global (except for ClearScope, which + * will flush the new object cache). 
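NewObjectWithGivenProto and NewObjectWithClassProto consult a per-compartment new-object cache keyed by the class, the prototype (or global, or type) and the allocation kind, and only fill it for objects whose slots fit inline. The real js::NewObjectCache clones a cached template object; the sketch below only illustrates the keying and a direct-mapped lookup, with an invented Recipe struct and hash:

// Sketch only: direct-mapped new-object cache keyed by (class, key, alloc kind).
#include <cstddef>
#include <functional>

struct Class;
struct Shape;
struct TypeObject;

struct Recipe {                 // everything needed to stamp out the object
    const Class      *clasp;
    const void       *key;      // proto, global, or type used for the lookup
    int               allocKind;
    const Shape      *shape;
    const TypeObject *type;
};

struct NewObjectCache {
    static const size_t SIZE = 64;      // power of two, direct mapped
    Recipe entries[SIZE] = {};

    static size_t hash(const Class *clasp, const void *key, int kind) {
        size_t h = std::hash<const void *>()(clasp);
        h ^= std::hash<const void *>()(key) + 0x9e3779b9 + (h << 6) + (h >> 2);
        return (h ^ size_t(kind)) & (SIZE - 1);
    }

    const Recipe *lookup(const Class *clasp, const void *key, int kind) const {
        const Recipe &e = entries[hash(clasp, key, kind)];
        bool hit = e.clasp == clasp && e.key == key && e.allocKind == kind;
        return hit ? &e : nullptr;      // miss falls back to the slow path
    }

    void fill(const Class *clasp, const void *key, int kind,
              const Shape *shape, const TypeObject *type) {
        entries[hash(clasp, key, kind)] = Recipe{clasp, key, kind, shape, type};
    }
};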
+ */ + JSProtoKey protoKey = GetClassProtoKey(clasp); + + NewObjectCache &cache = cx->compartment->newObjectCache; + + NewObjectCache::EntryIndex entry = -1; + if (parent->isGlobal() && protoKey != JSProto_Null) { + if (cache.lookupGlobal(clasp, parent->asGlobal(), kind, &entry)) + return cache.newObjectFromHit(cx, entry); + } + + if (!FindProto(cx, clasp, parent, &proto)) + return NULL; + + types::TypeObject *type = proto->getNewType(cx); + if (!type) + return NULL; + + JSObject *obj = NewObject(cx, clasp, type, parent, kind); + if (!obj) + return NULL; + + if (entry != -1 && !obj->hasDynamicSlots()) + cache.fillGlobal(entry, clasp, parent->asGlobal(), kind, obj); + + return obj; +} + +JSObject * +js::NewObjectWithType(JSContext *cx, types::TypeObject *type, JSObject *parent, gc::AllocKind kind) +{ + JS_ASSERT(type->proto->hasNewType(type)); + JS_ASSERT(parent); + + if (CanBeFinalizedInBackground(kind, &ObjectClass)) + kind = GetBackgroundAllocKind(kind); + + NewObjectCache &cache = cx->compartment->newObjectCache; + + NewObjectCache::EntryIndex entry = -1; + if (parent == type->proto->getParent()) { + if (cache.lookupType(&ObjectClass, type, kind, &entry)) + return cache.newObjectFromHit(cx, entry); + } + + JSObject *obj = NewObject(cx, &ObjectClass, type, parent, kind); + if (!obj) + return NULL; + + if (entry != -1 && !obj->hasDynamicSlots()) + cache.fillType(entry, &ObjectClass, type, kind, obj); + + return obj; +} + JSObject * js::NewReshapedObject(JSContext *cx, TypeObject *type, JSObject *parent, gc::AllocKind kind, const Shape *shape) @@ -2932,18 +3046,18 @@ js::NewReshapedObject(JSContext *cx, TypeObject *type, JSObject *parent, if (!res) return NULL; - if (JSID_IS_EMPTY(shape->propid)) + if (shape->isEmptyShape()) return res; /* Get all the ids in the object, in order. */ js::AutoIdVector ids(cx); - for (unsigned i = 0; i <= shape->slot; i++) { + for (unsigned i = 0; i <= shape->slot(); i++) { if (!ids.append(JSID_VOID)) return NULL; } const js::Shape *nshape = shape; - while (!JSID_IS_EMPTY(nshape->propid)) { - ids[nshape->slot] = nshape->propid; + while (!nshape->isEmptyShape()) { + ids[nshape->slot()] = nshape->propid(); nshape = nshape->previous(); } @@ -2966,7 +3080,7 @@ js_CreateThis(JSContext *cx, JSObject *callee) Class *newclasp = &ObjectClass; if (clasp == &FunctionClass) { - JSFunction *fun = callee->getFunctionPrivate(); + JSFunction *fun = callee->toFunction(); if (fun->isNative() && fun->u.n.clasp) newclasp = fun->u.n.clasp; } @@ -2978,10 +3092,7 @@ js_CreateThis(JSContext *cx, JSObject *callee) JSObject *proto = protov.isObjectOrNull() ? 
protov.toObjectOrNull() : NULL; JSObject *parent = callee->getParent(); gc::AllocKind kind = NewObjectGCKind(cx, newclasp); - JSObject *obj = NewObject(cx, newclasp, proto, parent, kind); - if (obj) - obj->syncSpecialEquality(); - return obj; + return NewObjectWithClassProto(cx, newclasp, proto, parent, kind); } static inline JSObject * @@ -2996,7 +3107,7 @@ CreateThisForFunctionWithType(JSContext *cx, types::TypeObject *type, JSObject * gc::AllocKind kind = type->newScript->allocKind; JSObject *res = NewObjectWithType(cx, type, parent, kind); if (res) - res->initMap((Shape *) type->newScript->shape.get()); + JS_ALWAYS_TRUE(res->setLastProperty(cx, (Shape *) type->newScript->shape.get())); return res; } @@ -3010,17 +3121,17 @@ js_CreateThisForFunctionWithProto(JSContext *cx, JSObject *callee, JSObject *pro JSObject *res; if (proto) { - types::TypeObject *type = proto->getNewType(cx, callee->getFunctionPrivate()); + types::TypeObject *type = proto->getNewType(cx, callee->toFunction()); if (!type) return NULL; res = CreateThisForFunctionWithType(cx, type, callee->getParent()); } else { gc::AllocKind kind = NewObjectGCKind(cx, &ObjectClass); - res = NewNonFunction(cx, &ObjectClass, proto, callee->getParent(), kind); + res = NewObjectWithClassProto(cx, &ObjectClass, proto, callee->getParent(), kind); } if (res && cx->typeInferenceEnabled()) - TypeScript::SetThis(cx, callee->getFunctionPrivate()->script(), types::Type::ObjectType(res)); + TypeScript::SetThis(cx, callee->toFunction()->script(), types::Type::ObjectType(res)); return res; } @@ -3047,7 +3158,7 @@ js_CreateThisForFunction(JSContext *cx, JSObject *callee, bool newType) if (!obj->setSingletonType(cx)) return NULL; - JSScript *calleeScript = callee->getFunctionPrivate()->script(); + JSScript *calleeScript = callee->toFunction()->script(); TypeScript::SetThis(cx, calleeScript, types::Type::ObjectType(obj)); } @@ -3138,7 +3249,7 @@ js_InferFlags(JSContext *cx, uintN defaultFlags) format = cs->format; if (JOF_MODE(format) != JOF_NAME) flags |= JSRESOLVE_QUALIFIED; - if (format & (JOF_SET | JOF_FOR)) { + if (format & JOF_SET) { flags |= JSRESOLVE_ASSIGNING; } else if (cs->length >= 0) { pc += cs->length; @@ -3331,7 +3442,7 @@ with_ThisObject(JSContext *cx, JSObject *obj) Class js::WithClass = { "With", - JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(2) | JSCLASS_IS_ANONYMOUS, + JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(3) | JSCLASS_IS_ANONYMOUS, JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ JS_PropertyStub, /* getProperty */ @@ -3386,6 +3497,8 @@ Class js::WithClass = { } }; +static const gc::AllocKind WITH_FINALIZE_KIND = gc::FINALIZE_OBJECT4; + JS_REQUIRES_STACK JSObject * js_NewWithObject(JSContext *cx, JSObject *proto, JSObject *parent, jsint depth) { @@ -3395,21 +3508,23 @@ js_NewWithObject(JSContext *cx, JSObject *proto, JSObject *parent, jsint depth) if (!type) return NULL; - obj = js_NewGCObject(cx, FINALIZE_OBJECT2); - if (!obj) - return NULL; - StackFrame *priv = js_FloatingFrameIfGenerator(cx, cx->fp()); - obj->init(cx, &WithClass, type, parent, priv, false); - - EmptyShape *emptyWithShape = EmptyShape::getEmptyWithShape(cx); + Shape *emptyWithShape = EmptyShape::getInitialShape(cx, &WithClass, proto, + parent->getGlobal(), + WITH_FINALIZE_KIND); if (!emptyWithShape) return NULL; - obj->initMap(emptyWithShape); + obj = JSObject::create(cx, WITH_FINALIZE_KIND, emptyWithShape, type, NULL); + if (!obj) + return NULL; OBJ_SET_BLOCK_DEPTH(cx, obj, depth); + if (!obj->setInternalScopeChain(cx, 
parent)) + return NULL; + obj->setPrivate(priv); + AutoObjectRooter tvr(cx, obj); JSObject *thisp = proto->thisObject(cx); if (!thisp) @@ -3421,24 +3536,22 @@ js_NewWithObject(JSContext *cx, JSObject *proto, JSObject *parent, jsint depth) return obj; } +static const uint32 BLOCK_RESERVED_SLOTS = 2; +static const gc::AllocKind BLOCK_FINALIZE_KIND = gc::FINALIZE_OBJECT4; + JSObject * js_NewBlockObject(JSContext *cx) { - /* - * Null obj's proto slot so that Object.prototype.* does not pollute block - * scopes and to give the block object its own scope. - */ - JSObject *blockObj = js_NewGCObject(cx, FINALIZE_OBJECT2); - if (!blockObj) + types::TypeObject *type = cx->compartment->getEmptyType(cx); + if (!type) return NULL; - EmptyShape *emptyBlockShape = EmptyShape::getEmptyBlockShape(cx); + Shape *emptyBlockShape = EmptyShape::getInitialShape(cx, &BlockClass, NULL, NULL, + BLOCK_FINALIZE_KIND); if (!emptyBlockShape) return NULL; - blockObj->init(cx, &BlockClass, &emptyTypeObject, NULL, NULL, false); - blockObj->initMap(emptyBlockShape); - return blockObj; + return JSObject::create(cx, FINALIZE_OBJECT4, emptyBlockShape, type, NULL); } JSObject * @@ -3446,28 +3559,38 @@ js_CloneBlockObject(JSContext *cx, JSObject *proto, StackFrame *fp) { JS_ASSERT(proto->isStaticBlock()); - size_t count = OBJ_BLOCK_COUNT(cx, proto); - gc::AllocKind kind = gc::GetGCObjectKind(count + 1); - TypeObject *type = proto->getNewType(cx); if (!type) return NULL; - JSObject *clone = js_NewGCObject(cx, kind); + HeapValue *slots; + if (!PreallocateObjectDynamicSlots(cx, proto->lastProperty(), &slots)) + return NULL; + + JSObject *clone = JSObject::create(cx, BLOCK_FINALIZE_KIND, proto->lastProperty(), type, slots); if (!clone) return NULL; StackFrame *priv = js_FloatingFrameIfGenerator(cx, fp); - /* The caller sets parent on its own. */ - clone->initClonedBlock(cx, type, priv); + /* Set the parent if necessary, as for call objects. */ + JSObject *global = priv->scopeChain().getGlobal(); + if (global != clone->getParent()) { + JS_ASSERT(clone->getParent() == NULL); + if (!clone->setParent(cx, global)) + return NULL; + } - if (!clone->ensureInstanceReservedSlots(cx, count + 1)) - return NULL; + JS_ASSERT(!clone->inDictionaryMode()); + JS_ASSERT(clone->isClonedBlock()); + JS_ASSERT(clone->slotSpan() >= OBJ_BLOCK_COUNT(cx, proto) + BLOCK_RESERVED_SLOTS); + clone->setPrivate(priv); clone->setSlot(JSSLOT_BLOCK_DEPTH, proto->getSlot(JSSLOT_BLOCK_DEPTH)); - JS_ASSERT(clone->isClonedBlock()); + if (clone->lastProperty()->extensibleParents() && !clone->generateOwnShape(cx)) + return NULL; + return clone; } @@ -3481,7 +3604,7 @@ js_PutBlockObject(JSContext *cx, JSBool normalUnwind) /* Block objects should have all reserved slots allocated early. */ uintN count = OBJ_BLOCK_COUNT(cx, obj); - JS_ASSERT(obj->numSlots() >= JSSLOT_BLOCK_DEPTH + 1 + count); + JS_ASSERT(obj->slotSpan() >= JSSLOT_BLOCK_DEPTH + 1 + count); /* The block and its locals must be on the current stack for GC safety. */ uintN depth = OBJ_BLOCK_DEPTH(cx, obj); @@ -3499,7 +3622,7 @@ js_PutBlockObject(JSContext *cx, JSBool normalUnwind) /* We must clear the private slot even with errors. */ obj->setPrivate(NULL); - fp->setScopeChainNoCallObj(*obj->getParent()); + fp->setScopeChainNoCallObj(*obj->internalScopeChain()); return normalUnwind; } @@ -3557,16 +3680,18 @@ JSObject::defineBlockVariable(JSContext *cx, jsid id, intN index) { JS_ASSERT(isStaticBlock()); - /* Use JSPROP_ENUMERATE to aid the disassembler. 
*/ + /* + * Use JSPROP_ENUMERATE to aid the disassembler, and don't convert this + * object to dictionary mode so that we can clone the block's shape later. + */ uint32 slot = JSSLOT_FREE(&BlockClass) + index; const Shape *shape = addProperty(cx, id, block_getProperty, block_setProperty, slot, JSPROP_ENUMERATE | JSPROP_PERMANENT, - Shape::HAS_SHORTID, index); + Shape::HAS_SHORTID, index, + /* allowDictionary = */ false); if (!shape) return NULL; - if (slot >= numSlots() && !growSlots(cx, slot + 1)) - return NULL; return shape; } @@ -3621,10 +3746,10 @@ JS_CopyPropertiesFrom(JSContext *cx, JSObject *target, JSObject *obj) StrictPropertyOp setter = shape->setter(); if ((attrs & JSPROP_SETTER) && !cx->compartment->wrap(cx, &setter)) return false; - Value v = shape->hasSlot() ? obj->getSlot(shape->slot) : UndefinedValue(); + Value v = shape->hasSlot() ? obj->getSlot(shape->slot()) : UndefinedValue(); if (!cx->compartment->wrap(cx, &v)) return false; - if (!target->defineGeneric(cx, shape->propid, v, getter, setter, attrs)) + if (!target->defineGeneric(cx, shape->propid(), v, getter, setter, attrs)) return false; } return true; @@ -3634,9 +3759,7 @@ static bool CopySlots(JSContext *cx, JSObject *from, JSObject *to) { JS_ASSERT(!from->isNative() && !to->isNative()); - size_t nslots = from->numSlots(); - if (to->ensureSlots(cx, nslots)) - return false; + JS_ASSERT(from->getClass() == to->getClass()); size_t n = 0; if (from->isWrapper() && @@ -3646,7 +3769,8 @@ CopySlots(JSContext *cx, JSObject *from, JSObject *to) n = 2; } - for (; n < nslots; ++n) { + size_t span = JSCLASS_RESERVED_SLOTS(from->getClass()); + for (; n < span; ++n) { Value v = from->getSlot(n); if (!cx->compartment->wrap(cx, &v)) return false; @@ -3672,7 +3796,7 @@ JS_CloneObject(JSContext *cx, JSObject *obj, JSObject *proto, JSObject *parent) return NULL; } } - JSObject *clone = NewObject(cx, obj->getClass(), proto, parent, obj->getAllocKind()); + JSObject *clone = NewObjectWithGivenProto(cx, obj->getClass(), proto, parent, obj->getAllocKind()); if (!clone) return NULL; if (obj->isNative()) { @@ -3682,7 +3806,7 @@ JS_CloneObject(JSContext *cx, JSObject *obj, JSObject *proto, JSObject *parent) return NULL; } - if (obj->getClass()->flags & JSCLASS_HAS_PRIVATE) + if (obj->hasPrivate()) clone->setPrivate(obj->getPrivate()); } else { JS_ASSERT(obj->isProxy()); @@ -3697,11 +3821,18 @@ struct JSObject::TradeGutsReserved { JSContext *cx; Vector avals; Vector bvals; + int newafixed; + int newbfixed; + Shape *newashape; + Shape *newbshape; HeapValue *newaslots; HeapValue *newbslots; TradeGutsReserved(JSContext *cx) - : cx(cx), avals(cx), bvals(cx), newaslots(NULL), newbslots(NULL) + : cx(cx), avals(cx), bvals(cx), + newafixed(0), newbfixed(0), + newashape(NULL), newbshape(NULL), + newaslots(NULL), newbslots(NULL) {} ~TradeGutsReserved() @@ -3726,15 +3857,63 @@ JSObject::ReserveForTradeGuts(JSContext *cx, JSObject *a, JSObject *b, if (a->structSize() == b->structSize()) return true; + /* + * If either object is native, it needs a new shape to preserve the + * invariant that objects with the same shape have the same number of + * inline slots. The fixed slots will be updated in place during TradeGuts. + * Non-native objects need to be reshaped according to the new count. 
+ */ + if (a->isNative()) { + if (!a->generateOwnShape(cx)) + return false; + } else { + reserved.newbshape = EmptyShape::getInitialShape(cx, a->getClass(), + a->getProto(), a->getParent(), + b->getAllocKind()); + if (!reserved.newbshape) + return false; + } + if (b->isNative()) { + if (!b->generateOwnShape(cx)) + return false; + } else { + reserved.newashape = EmptyShape::getInitialShape(cx, b->getClass(), + b->getProto(), b->getParent(), + a->getAllocKind()); + if (!reserved.newashape) + return false; + } + /* The avals/bvals vectors hold all original values from the objects. */ - unsigned acap = a->numSlots(); - unsigned bcap = b->numSlots(); + if (!reserved.avals.reserve(a->slotSpan())) + return false; + if (!reserved.bvals.reserve(b->slotSpan())) + return false; - if (!reserved.avals.reserve(acap)) - return false; - if (!reserved.bvals.reserve(bcap)) - return false; + JS_ASSERT(a->elements == emptyObjectElements); + JS_ASSERT(b->elements == emptyObjectElements); + + /* + * The newafixed/newbfixed hold the number of fixed slots in the objects + * after the swap. Adjust these counts according to whether the objects + * use their last fixed slot for storing private data. + */ + + reserved.newafixed = a->numFixedSlots(); + reserved.newbfixed = b->numFixedSlots(); + + if (a->hasPrivate()) { + reserved.newafixed++; + reserved.newbfixed--; + } + if (b->hasPrivate()) { + reserved.newbfixed++; + reserved.newafixed--; + } + + JS_ASSERT(reserved.newafixed >= 0); + JS_ASSERT(reserved.newbfixed >= 0); /* * The newaslots/newbslots arrays hold any dynamic slots for the objects @@ -3742,29 +3921,25 @@ JSObject::ReserveForTradeGuts(JSContext *cx, JSObject *a, JSObject *b, * other object. */ - unsigned afixed = a->numFixedSlots(); - unsigned bfixed = b->numFixedSlots(); + unsigned adynamic = dynamicSlotsCount(reserved.newafixed, b->slotSpan()); + unsigned bdynamic = dynamicSlotsCount(reserved.newbfixed, a->slotSpan()); - if (afixed < bcap) { - reserved.newaslots = (HeapValue *) cx->malloc_(sizeof(HeapValue) * (bcap - afixed)); + if (adynamic) { + reserved.newaslots = (HeapValue *) cx->malloc_(sizeof(HeapValue) * adynamic); if (!reserved.newaslots) return false; + Debug_SetValueRangeToCrashOnTouch(reserved.newaslots, adynamic); } - if (bfixed < acap) { - reserved.newbslots = (HeapValue *) cx->malloc_(sizeof(HeapValue) * (acap - bfixed)); + if (bdynamic) { + reserved.newbslots = (HeapValue *) cx->malloc_(sizeof(HeapValue) * bdynamic); if (!reserved.newbslots) return false; + Debug_SetValueRangeToCrashOnTouch(reserved.newbslots, bdynamic); } return true; } -void -JSObject::updateFixedSlots(uintN fixed) -{ - flags = (flags & ~FIXED_SLOTS_MASK) | (fixed << FIXED_SLOTS_SHIFT); -} - void JSObject::TradeGuts(JSContext *cx, JSObject *a, JSObject *b, TradeGutsReserved &reserved) { @@ -3801,10 +3976,6 @@ JSObject::TradeGuts(JSContext *cx, JSObject *a, JSObject *b, TradeGutsReserved & } #endif - /* New types for a JSObject need to be stable when trading guts. */ - TypeObject *newTypeA = a->newType; - TypeObject *newTypeB = b->newType; - /* Trade the guts of the objects. */ const size_t size = a->structSize(); if (size == b->structSize()) { @@ -3826,18 +3997,8 @@ JSObject::TradeGuts(JSContext *cx, JSObject *a, JSObject *b, TradeGutsReserved & * the new layout for the other object. */ - /* - * If either object is native, it needs a new shape to preserve the - * invariant that objects with the same shape have the same number of - * inline slots. 
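ReserveForTradeGuts sizes the new heap slot arrays with dynamicSlotsCount(fixed, span): after this patch the dynamic allocation is a pure function of the fixed-slot count and the slot span, so it never needs to be stored on the object. A rough sketch of such a function (the rounding policy and the SLOT_CAPACITY_MIN value here are assumptions, not the engine's exact numbers):

// Sketch only: dynamic slot count derived from fixed slots and slot span.
#include <cstddef>
#include <algorithm>

static const size_t SLOT_CAPACITY_MIN = 8;   // assumed minimum heap allocation

static size_t roundUpPow2(size_t n) {
    size_t p = 1;
    while (p < n)
        p <<= 1;
    return p;
}

static size_t dynamicSlotsCount(size_t nfixed, size_t span) {
    if (span <= nfixed)
        return 0;                            // everything fits inside the GC cell
    return roundUpPow2(std::max(span - nfixed, SLOT_CAPACITY_MIN));
}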
- */ - if (a->isNative()) - a->generateOwnShape(cx); - if (b->isNative()) - b->generateOwnShape(cx); - - unsigned acap = a->numSlots(); - unsigned bcap = b->numSlots(); + unsigned acap = a->slotSpan(); + unsigned bcap = b->slotSpan(); for (size_t i = 0; i < acap; i++) reserved.avals.infallibleAppend(a->getSlot(i)); @@ -3846,38 +4007,43 @@ JSObject::TradeGuts(JSContext *cx, JSObject *a, JSObject *b, TradeGutsReserved & reserved.bvals.infallibleAppend(b->getSlot(i)); /* Done with the dynamic slots. */ - if (a->hasSlotsArray()) + if (a->hasDynamicSlots()) cx->free_(a->slots); - if (b->hasSlotsArray()) + if (b->hasDynamicSlots()) cx->free_(b->slots); - unsigned afixed = a->numFixedSlots(); - unsigned bfixed = b->numFixedSlots(); + void *apriv = a->hasPrivate() ? a->getPrivate() : NULL; + void *bpriv = b->hasPrivate() ? b->getPrivate() : NULL; char tmp[sizeof(JSObject)]; memcpy(&tmp, a, sizeof tmp); memcpy(a, b, sizeof tmp); memcpy(b, &tmp, sizeof tmp); - a->updateFixedSlots(afixed); - a->slots = reserved.newaslots; - a->capacity = Max(afixed, bcap); - a->copySlotRange(0, reserved.bvals.begin(), bcap, false); - a->clearSlotRange(bcap, a->capacity - bcap); + if (a->isNative()) + a->shape_->setNumFixedSlots(reserved.newafixed); + else + a->shape_ = reserved.newashape; + + a->slots = reserved.newaslots; + a->copySlotRange(0, reserved.bvals.begin(), bcap, false); + if (a->hasPrivate()) + a->setPrivate(bpriv); + + if (b->isNative()) + b->shape_->setNumFixedSlots(reserved.newbfixed); + else + b->shape_ = reserved.newbshape; - b->updateFixedSlots(bfixed); b->slots = reserved.newbslots; - b->capacity = Max(bfixed, acap); b->copySlotRange(0, reserved.avals.begin(), acap, false); - b->clearSlotRange(acap, b->capacity - acap); + if (b->hasPrivate()) + b->setPrivate(apriv); /* Make sure the destructor for reserved doesn't free the slots. */ reserved.newaslots = NULL; reserved.newbslots = NULL; } - - a->newType = newTypeA; - b->newType = newTypeB; } /* @@ -3968,7 +4134,7 @@ js_XDRBlockObject(JSXDRState *xdr, JSObject **objp) if (xdr->mode == JSXDR_ENCODE) { obj = *objp; - parent = obj->getParent(); + parent = obj->getStaticBlockScopeChain(); parentId = JSScript::isValidOffset(xdr->script->objectsOffset) ? 
FindObjectIndex(xdr->script->objects(), parent) : NO_PARENT_INDEX; @@ -3999,7 +4165,7 @@ js_XDRBlockObject(JSXDRState *xdr, JSObject **objp) parent = NULL; else parent = xdr->script->getObject(parentId); - obj->setParent(parent); + obj->setStaticBlockScopeChain(parent); } AutoObjectRooter tvr(cx, obj); @@ -4032,7 +4198,7 @@ js_XDRBlockObject(JSXDRState *xdr, JSObject **objp) for (Shape::Range r(obj->lastProperty()); !r.empty(); r.popFront()) { shape = &r.front(); - shapes[shape->shortid] = shape; + shapes[shape->shortid()] = shape; } /* @@ -4043,12 +4209,12 @@ js_XDRBlockObject(JSXDRState *xdr, JSObject **objp) shape = shapes[i]; JS_ASSERT(shape->getter() == block_getProperty); - jsid propid = shape->propid; + jsid propid = shape->propid(); JS_ASSERT(JSID_IS_ATOM(propid)); JSAtom *atom = JSID_TO_ATOM(propid); #ifdef DEBUG - uint16 shortid = uint16(shape->shortid); + uint16 shortid = uint16(shape->shortid()); JS_ASSERT(shortid == i); #endif @@ -4064,7 +4230,9 @@ js_XDRBlockObject(JSXDRState *xdr, JSObject **objp) Class js::BlockClass = { "Block", - JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(1) | JSCLASS_IS_ANONYMOUS, + JSCLASS_HAS_PRIVATE | + JSCLASS_HAS_RESERVED_SLOTS(BLOCK_RESERVED_SLOTS) | + JSCLASS_IS_ANONYMOUS, JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ JS_PropertyStub, /* getProperty */ @@ -4089,9 +4257,6 @@ DefineStandardSlot(JSContext *cx, JSObject *obj, JSProtoKey key, JSAtom *atom, JS_ASSERT(obj->isGlobal()); JS_ASSERT(obj->isNative()); - if (!obj->ensureClassReservedSlots(cx)) - return false; - const Shape *shape = obj->nativeLookup(cx, id); if (!shape) { uint32 slot = 2 * JSProto_LIMIT + key; @@ -4140,7 +4305,7 @@ DefineConstructorAndPrototype(JSContext *cx, JSObject *obj, JSProtoKey key, JSAt Native constructor, uintN nargs, JSPropertySpec *ps, JSFunctionSpec *fs, JSPropertySpec *static_ps, JSFunctionSpec *static_fs, - JSObject **ctorp) + JSObject **ctorp, AllocKind ctorKind) { /* * Create a prototype object for this class. @@ -4171,7 +4336,7 @@ DefineConstructorAndPrototype(JSContext *cx, JSObject *obj, JSProtoKey key, JSAt * [which already needs to happen for bug 638316], figure out nicer * semantics for null-protoProto, and use createBlankPrototype.) */ - JSObject *proto = NewObject(cx, clasp, protoProto, obj); + JSObject *proto = NewObjectWithClassProto(cx, clasp, protoProto, obj); if (!proto) return NULL; @@ -4181,12 +4346,6 @@ DefineConstructorAndPrototype(JSContext *cx, JSObject *obj, JSProtoKey key, JSAt if (clasp == &ArrayClass && !proto->makeDenseArraySlow(cx)) return NULL; - TypeObject *type = proto->getNewType(cx); - if (!type || !type->getEmptyShape(cx, proto->getClass(), FINALIZE_OBJECT0)) - return NULL; - - proto->syncSpecialEquality(); - /* After this point, control must exit via label bad or out. */ JSObject *ctor; bool named = false; @@ -4215,7 +4374,8 @@ DefineConstructorAndPrototype(JSContext *cx, JSObject *obj, JSProtoKey key, JSAt * perhaps as part of bug 638316.) 
*/ JSFunction *fun = - js_NewFunction(cx, NULL, constructor, nargs, JSFUN_CONSTRUCTOR, obj, atom); + js_NewFunction(cx, NULL, constructor, nargs, JSFUN_CONSTRUCTOR, obj, atom, + ctorKind); if (!fun) goto bad; fun->setConstructorClass(clasp); @@ -4320,7 +4480,7 @@ js_InitClass(JSContext *cx, JSObject *obj, JSObject *protoProto, Class *clasp, Native constructor, uintN nargs, JSPropertySpec *ps, JSFunctionSpec *fs, JSPropertySpec *static_ps, JSFunctionSpec *static_fs, - JSObject **ctorp) + JSObject **ctorp, AllocKind ctorKind) { JSAtom *atom = js_Atomize(cx, clasp->name, strlen(clasp->name)); if (!atom) @@ -4346,62 +4506,147 @@ js_InitClass(JSContext *cx, JSObject *obj, JSObject *protoProto, } return DefineConstructorAndPrototype(cx, obj, key, atom, protoProto, clasp, constructor, nargs, - ps, fs, static_ps, static_fs, ctorp); -} - -void -JSObject::clearSlotRange(size_t start, size_t length) -{ - JS_ASSERT(start + length <= capacity); - if (isDenseArray()) { - ClearValueRange(compartment(), slots + start, length, true); - } else { - size_t fixed = numFixedSlots(); - if (start < fixed) { - if (start + length < fixed) { - ClearValueRange(compartment(), fixedSlots() + start, length, false); - } else { - size_t localClear = fixed - start; - ClearValueRange(compartment(), fixedSlots() + start, localClear, false); - ClearValueRange(compartment(), slots, length - localClear, false); - } - } else { - ClearValueRange(compartment(), slots + start - fixed, length, false); - } - } + ps, fs, static_ps, static_fs, ctorp, ctorKind); } void JSObject::copySlotRange(size_t start, const Value *vector, size_t length, bool valid) { - JS_ASSERT(start + length <= capacity); - if (valid) prepareSlotRangeForOverwrite(start, start + length); - if (isDenseArray()) { - memcpy(slots + start, vector, length * sizeof(Value)); - } else { - size_t fixed = numFixedSlots(); - if (start < fixed) { - if (start + length < fixed) { - memcpy(fixedSlots() + start, vector, length * sizeof(Value)); - } else { - size_t localCopy = fixed - start; - memcpy(fixedSlots() + start, vector, localCopy * sizeof(Value)); - memcpy(slots, vector + localCopy, (length - localCopy) * sizeof(Value)); - } + JS_ASSERT(!isDenseArray()); + JS_ASSERT(slotInRange(start + length, SENTINEL_ALLOWED)); + size_t fixed = numFixedSlots(); + if (start < fixed) { + if (start + length < fixed) { + memcpy(fixedSlots() + start, vector, length * sizeof(Value)); } else { - memcpy(slots + start - fixed, vector, length * sizeof(Value)); + size_t localCopy = fixed - start; + memcpy(fixedSlots() + start, vector, localCopy * sizeof(Value)); + memcpy(slots, vector + localCopy, (length - localCopy) * sizeof(Value)); } + } else { + memcpy(slots + start - fixed, vector, length * sizeof(Value)); } } -bool -JSObject::allocSlots(JSContext *cx, size_t newcap) +inline void +JSObject::invalidateSlotRange(size_t start, size_t length) { - JS_ASSERT(newcap >= numSlots() && !hasSlotsArray()); - size_t oldSize = slotsAndStructSize(); +#ifdef DEBUG + JS_ASSERT(!isDenseArray()); + + size_t fixed = numFixedSlots(); + + /* No bounds checks, allocated space has been updated but not the shape. 
*/ + if (start < fixed) { + if (start + length < fixed) { + Debug_SetValueRangeToCrashOnTouch(fixedSlots() + start, length); + } else { + size_t localClear = fixed - start; + Debug_SetValueRangeToCrashOnTouch(fixedSlots() + start, localClear); + Debug_SetValueRangeToCrashOnTouch(slots, length - localClear); + } + } else { + Debug_SetValueRangeToCrashOnTouch(slots + start - fixed, length); + } +#endif /* DEBUG */ +} + +inline bool +JSObject::updateSlotsForSpan(JSContext *cx, size_t oldSpan, size_t newSpan) +{ + JS_ASSERT(oldSpan != newSpan); + + size_t oldCount = dynamicSlotsCount(numFixedSlots(), oldSpan); + size_t newCount = dynamicSlotsCount(numFixedSlots(), newSpan); + + if (oldSpan < newSpan) { + if (oldCount < newCount && !growSlots(cx, oldCount, newCount)) + return false; + + if (newSpan == oldSpan + 1) + initSlotUnchecked(oldSpan, UndefinedValue()); + else + initializeSlotRange(oldSpan, newSpan - oldSpan); + } else { + /* Trigger write barriers on the old slots before reallocating. */ + prepareSlotRangeForOverwrite(newSpan, oldSpan); + invalidateSlotRange(newSpan, oldSpan - newSpan); + + if (oldCount > newCount) + shrinkSlots(cx, oldCount, newCount); + } + + return true; +} + +bool +JSObject::setLastProperty(JSContext *cx, const js::Shape *shape) +{ + JS_ASSERT(!inDictionaryMode()); + JS_ASSERT(!shape->inDictionary()); + JS_ASSERT(shape->compartment() == compartment()); + JS_ASSERT(shape->numFixedSlots() == numFixedSlots()); + + size_t oldSpan = lastProperty()->slotSpan(); + size_t newSpan = shape->slotSpan(); + + if (oldSpan == newSpan) { + shape_ = const_cast(shape); + return true; + } + + if (!updateSlotsForSpan(cx, oldSpan, newSpan)) + return false; + + shape_ = const_cast(shape); + return true; +} + +bool +JSObject::setSlotSpan(JSContext *cx, uint32 span) +{ + JS_ASSERT(inDictionaryMode()); + js::BaseShape *base = lastProperty()->base(); + + size_t oldSpan = base->slotSpan(); + + if (oldSpan == span) + return true; + + if (!updateSlotsForSpan(cx, oldSpan, span)) + return false; + + base->setSlotSpan(span); + return true; +} + +bool +JSObject::growSlots(JSContext *cx, uint32 oldCount, uint32 newCount) +{ + JS_ASSERT(newCount > oldCount); + JS_ASSERT(newCount >= SLOT_CAPACITY_MIN); + JS_ASSERT(!isDenseArray()); + + /* + * Slots are only allocated for call objects when new properties are + * added to them, which can only happen while the call is still on the + * stack (and an eval, DEFFUN, etc. happens). We thus do not need to + * worry about updating any active outer function args/vars. + */ + JS_ASSERT_IF(isCall(), asCall().maybeStackFrame() != NULL); + + /* + * Slot capacities are determined by the span of allocated objects. Due to + * the limited number of bits to store shape slots, object growth is + * throttled well before the slot capacity can overflow. + */ + JS_ASSERT(newCount < NELEMENTS_LIMIT); + + size_t oldSize = Probes::objectResizeActive() ? slotsAndStructSize() : 0; + size_t newSize = oldSize + (newCount - oldCount) * sizeof(Value); /* * If we are allocating slots for an object whose type is always created @@ -4409,7 +4654,7 @@ JSObject::allocSlots(JSContext *cx, size_t newcap) * type to give these objects a larger number of fixed slots when future * objects are constructed. 
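setLastProperty and setSlotSpan both reduce to updateSlotsForSpan: an object's layout is just its last Shape, and swapping that pointer only requires resizing the slot storage when the old and new shapes disagree on slot span. A minimal sketch with hypothetical Shape/Obj types, where a plain vector stands in for the fixed-plus-dynamic slot storage:

// Sketch only: sync slot storage to the new shape's span, then commit the shape.
#include <cstddef>
#include <vector>

struct Shape {
    size_t slotSpan;        // number of slots the layout requires
};

struct Obj {
    const Shape *shape;
    std::vector<int> slots; // stand-in for fixed + dynamic slot storage

    bool setLastProperty(const Shape *newShape) {
        size_t oldSpan = shape->slotSpan;
        size_t newSpan = newShape->slotSpan;
        if (oldSpan != newSpan)
            slots.resize(newSpan);  // grow or shrink; new slots start undefined
        shape = newShape;           // only after storage matches the new span
        return true;
    }
};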
*/ - if (!hasLazyType() && type()->newScript) { + if (!hasLazyType() && !oldCount && type()->newScript) { gc::AllocKind kind = type()->newScript->allocKind; unsigned newScriptSlots = gc::GetGCKindSlots(kind); if (newScriptSlots == numFixedSlots() && gc::TryIncrementAllocKind(&kind)) { @@ -4424,111 +4669,42 @@ JSObject::allocSlots(JSContext *cx, size_t newcap) } } - if (newcap > NSLOTS_LIMIT) { - js_ReportAllocationOverflow(cx); - return false; + if (!oldCount) { + slots = (HeapValue *) cx->malloc_(newCount * sizeof(HeapValue)); + if (!slots) + return false; + Debug_SetValueRangeToCrashOnTouch(slots, newCount); + if (Probes::objectResizeActive()) + Probes::resizeObject(cx, this, oldSize, newSize); + return true; } - uint32 allocCount = numDynamicSlots(newcap); + HeapValue *newslots = (HeapValue*) cx->realloc_(slots, oldCount * sizeof(HeapValue), + newCount * sizeof(HeapValue)); + if (!newslots) + return false; /* Leave slots at its old size. */ - HeapValue *tmpslots = (HeapValue*) cx->malloc_(allocCount * sizeof(HeapValue)); - if (!tmpslots) - return false; /* Leave slots at inline buffer. */ - slots = tmpslots; - capacity = newcap; + bool changed = slots != newslots; + slots = newslots; - if (isDenseArray()) { - /* Copy over anything from the inline buffer. */ - memcpy(slots, fixedSlots(), getDenseArrayInitializedLength() * sizeof(HeapValue)); - if (!cx->typeInferenceEnabled()) - backfillDenseArrayHoles(cx); - } else { - /* Clear out the new slots without copying. */ - InitValueRange(slots, allocCount, false); - } - - Probes::resizeObject(cx, this, oldSize, slotsAndStructSize()); - - return true; -} - -bool -JSObject::growSlots(JSContext *cx, size_t newcap) -{ - /* - * Slots are only allocated for call objects when new properties are - * added to them, which can only happen while the call is still on the - * stack (and an eval, DEFFUN, etc. happens). We thus do not need to - * worry about updating any active outer function args/vars. - */ - JS_ASSERT_IF(isCall(), asCall().maybeStackFrame() != NULL); - - /* - * When an object with CAPACITY_DOUBLING_MAX or fewer slots needs to - * grow, double its capacity, to add N elements in amortized O(N) time. - * - * Above this limit, grow by 12.5% each time. Speed is still amortized - * O(N), with a higher constant factor, and we waste less space. - */ - static const size_t CAPACITY_DOUBLING_MAX = 1024 * 1024; - static const size_t CAPACITY_CHUNK = CAPACITY_DOUBLING_MAX / sizeof(Value); - - uint32 oldcap = numSlots(); - JS_ASSERT(oldcap < newcap); - - size_t oldSize = slotsAndStructSize(); - - uint32 nextsize = (oldcap <= CAPACITY_DOUBLING_MAX) - ? oldcap * 2 - : oldcap + (oldcap >> 3); - - uint32 actualCapacity = JS_MAX(newcap, nextsize); - if (actualCapacity >= CAPACITY_CHUNK) - actualCapacity = JS_ROUNDUP(actualCapacity, CAPACITY_CHUNK); - else if (actualCapacity < SLOT_CAPACITY_MIN) - actualCapacity = SLOT_CAPACITY_MIN; - - /* Don't let nslots get close to wrapping around uint32. */ - if (actualCapacity >= NSLOTS_LIMIT) { - JS_ReportOutOfMemory(cx); - return false; - } - - /* If nothing was allocated yet, treat it as initial allocation. */ - if (!hasSlotsArray()) - return allocSlots(cx, actualCapacity); - - uint32 oldAllocCount = numDynamicSlots(oldcap); - uint32 allocCount = numDynamicSlots(actualCapacity); - - HeapValue *tmpslots = (HeapValue*) cx->realloc_(slots, oldAllocCount * sizeof(HeapValue), - allocCount * sizeof(HeapValue)); - if (!tmpslots) - return false; /* Leave dslots as its old size. 
*/ - - bool changed = slots != tmpslots; - slots = tmpslots; - capacity = actualCapacity; - - if (isDenseArray()) { - if (!cx->typeInferenceEnabled()) - backfillDenseArrayHoles(cx); - } else { - /* Clear the new slots we added. */ - InitValueRange(slots + oldAllocCount, allocCount - oldAllocCount, false); - } + Debug_SetValueRangeToCrashOnTouch(slots + oldCount, newCount - oldCount); + /* Changes in the slots of global objects can trigger recompilation. */ if (changed && isGlobal()) types::MarkObjectStateChange(cx, this); - Probes::resizeObject(cx, this, oldSize, slotsAndStructSize()); + if (Probes::objectResizeActive()) + Probes::resizeObject(cx, this, oldSize, newSize); return true; } void -JSObject::shrinkSlots(JSContext *cx, size_t newcap) +JSObject::shrinkSlots(JSContext *cx, uint32 oldCount, uint32 newCount) { + JS_ASSERT(newCount < oldCount); + JS_ASSERT(!isDenseArray()); + /* * Refuse to shrink slots for call objects. This only happens in a very * obscure situation (deleting names introduced by a direct 'eval') and @@ -4538,69 +4714,141 @@ JSObject::shrinkSlots(JSContext *cx, size_t newcap) if (isCall()) return; - JS_ASSERT_IF(isDenseArray(), initializedLength() <= newcap); + size_t oldSize = Probes::objectResizeActive() ? slotsAndStructSize() : 0; + size_t newSize = oldSize - (oldCount - newCount) * sizeof(Value); - uint32 oldcap = numSlots(); - JS_ASSERT(newcap <= oldcap); - JS_ASSERT(newcap >= slotSpan()); - - size_t oldSize = slotsAndStructSize(); - - if (oldcap <= SLOT_CAPACITY_MIN || !hasSlotsArray()) { - /* - * We won't shrink the slots any more. Clear excess entries. When - * shrinking dense arrays, make sure to update the initialized length - * afterwards. - */ - if (!isDenseArray()) - clearSlotRange(newcap, oldcap - newcap); + if (newCount == 0) { + cx->free_(slots); + slots = NULL; + if (Probes::objectResizeActive()) + Probes::resizeObject(cx, this, oldSize, newSize); return; } - uint32 fill = newcap; - if (isDenseArray()) { - newcap = Max(newcap, size_t(SLOT_CAPACITY_MIN)); - newcap = Max(newcap, numFixedSlots()); - } else { - newcap = Max(newcap, numFixedSlots() + SLOT_CAPACITY_MIN); - } + JS_ASSERT(newCount >= SLOT_CAPACITY_MIN); - uint32 allocCount = numDynamicSlots(newcap); - - HeapValue *tmpslots = (HeapValue*) cx->realloc_(slots, allocCount * sizeof(Value)); - if (!tmpslots) + HeapValue *newslots = (HeapValue*) cx->realloc_(slots, newCount * sizeof(HeapValue)); + if (!newslots) return; /* Leave slots at its old size. */ - bool changed = slots != tmpslots; - slots = tmpslots; - capacity = newcap; - - if (fill < newcap) { - /* - * Clear any excess holes if we tried to shrink below SLOT_CAPACITY_MIN - * or numFixedSlots(). As above, caller must update the initialized - * length for dense arrays. - */ - if (!isDenseArray()) - clearSlotRange(fill, newcap - fill); - } + bool changed = slots != newslots; + slots = newslots; + /* Watch for changes in global object slots, as for growSlots. 
*/ if (changed && isGlobal()) types::MarkObjectStateChange(cx, this); - Probes::resizeObject(cx, this, oldSize, slotsAndStructSize()); + if (Probes::objectResizeActive()) + Probes::resizeObject(cx, this, oldSize, newSize); } bool -JSObject::ensureInstanceReservedSlots(JSContext *cx, size_t nreserved) +JSObject::growElements(JSContext *cx, uintN newcap) { - JS_ASSERT_IF(isNative(), - isBlock() || isCall() || (isFunction() && isBoundFunction())); + JS_ASSERT(isDenseArray()); - uintN nslots = JSSLOT_FREE(getClass()) + nreserved; - return nslots <= numSlots() || allocSlots(cx, nslots); + /* + * When an object with CAPACITY_DOUBLING_MAX or fewer elements needs to + * grow, double its capacity, to add N elements in amortized O(N) time. + * + * Above this limit, grow by 12.5% each time. Speed is still amortized + * O(N), with a higher constant factor, and we waste less space. + */ + static const size_t CAPACITY_DOUBLING_MAX = 1024 * 1024; + static const size_t CAPACITY_CHUNK = CAPACITY_DOUBLING_MAX / sizeof(Value); + + uint32 oldcap = getDenseArrayCapacity(); + JS_ASSERT(oldcap <= newcap); + + size_t oldSize = Probes::objectResizeActive() ? slotsAndStructSize() : 0; + + uint32 nextsize = (oldcap <= CAPACITY_DOUBLING_MAX) + ? oldcap * 2 + : oldcap + (oldcap >> 3); + + uint32 actualCapacity = JS_MAX(newcap, nextsize); + if (actualCapacity >= CAPACITY_CHUNK) + actualCapacity = JS_ROUNDUP(actualCapacity, CAPACITY_CHUNK); + else if (actualCapacity < SLOT_CAPACITY_MIN) + actualCapacity = SLOT_CAPACITY_MIN; + + /* Don't let nelements get close to wrapping around uint32. */ + if (actualCapacity >= NELEMENTS_LIMIT || actualCapacity < oldcap || actualCapacity < newcap) { + JS_ReportOutOfMemory(cx); + return false; + } + + uint32 initlen = getDenseArrayInitializedLength(); + uint32 newAllocated = actualCapacity + ObjectElements::VALUES_PER_HEADER; + + ObjectElements *newheader; + if (hasDynamicElements()) { + uint32 oldAllocated = oldcap + ObjectElements::VALUES_PER_HEADER; + newheader = (ObjectElements *) + cx->realloc_(getElementsHeader(), oldAllocated * sizeof(Value), + newAllocated * sizeof(Value)); + if (!newheader) + return false; /* Leave elements as its old size. */ + } else { + newheader = (ObjectElements *) cx->malloc_(newAllocated * sizeof(Value)); + if (!newheader) + return false; /* Ditto. */ + memcpy(newheader, getElementsHeader(), + (ObjectElements::VALUES_PER_HEADER + initlen) * sizeof(Value)); + } + + newheader->capacity = actualCapacity; + elements = newheader->elements(); + + Debug_SetValueRangeToCrashOnTouch(elements + initlen, actualCapacity - initlen); + + if (Probes::objectResizeActive()) + Probes::resizeObject(cx, this, oldSize, slotsAndStructSize()); + + return true; } +void +JSObject::shrinkElements(JSContext *cx, uintN newcap) +{ + JS_ASSERT(isDenseArray()); + + uint32 oldcap = getDenseArrayCapacity(); + JS_ASSERT(newcap <= oldcap); + + size_t oldSize = Probes::objectResizeActive() ? slotsAndStructSize() : 0; + + /* Don't shrink elements below the minimum capacity. */ + if (oldcap <= SLOT_CAPACITY_MIN || !hasDynamicElements()) + return; + + newcap = Max(newcap, SLOT_CAPACITY_MIN); + + uint32 newAllocated = newcap + ObjectElements::VALUES_PER_HEADER; + + ObjectElements *newheader = (ObjectElements *) + cx->realloc_(getElementsHeader(), newAllocated * sizeof(Value)); + if (!newheader) + return; /* Leave elements at its old size. 
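growElements uses the growth policy described in its comment: double while the capacity is at or below CAPACITY_DOUBLING_MAX, then grow by 12.5%, chunk-align large capacities, and refuse to grow past NELEMENTS_LIMIT. The standalone version below mirrors that arithmetic; the SLOT_CAPACITY_MIN and NELEMENTS_LIMIT values are assumed, and sizeof(uint64_t) stands in for sizeof(Value):

// Sketch only: capacity policy giving amortized O(N) appends with bounded waste.
#include <cstdint>
#include <algorithm>

static const size_t CAPACITY_DOUBLING_MAX = 1024 * 1024;
static const size_t CAPACITY_CHUNK = CAPACITY_DOUBLING_MAX / sizeof(uint64_t);
static const size_t SLOT_CAPACITY_MIN = 8;            // assumed
static const size_t NELEMENTS_LIMIT = 1 << 28;        // assumed overflow guard

static size_t roundUp(size_t n, size_t unit) {
    return ((n + unit - 1) / unit) * unit;
}

// Returns 0 on overflow, otherwise the capacity to allocate for 'request'
// elements when the current capacity is 'oldcap'.
static size_t nextCapacity(size_t oldcap, size_t request) {
    size_t nextsize = (oldcap <= CAPACITY_DOUBLING_MAX)
                      ? oldcap * 2
                      : oldcap + (oldcap >> 3);        // grow by 12.5%
    size_t cap = std::max(request, nextsize);
    if (cap >= CAPACITY_CHUNK)
        cap = roundUp(cap, CAPACITY_CHUNK);            // big arrays: chunk-align
    else if (cap < SLOT_CAPACITY_MIN)
        cap = SLOT_CAPACITY_MIN;
    if (cap >= NELEMENTS_LIMIT || cap < oldcap || cap < request)
        return 0;                                      // caller reports OOM
    return cap;
}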
*/ + + newheader->capacity = newcap; + elements = newheader->elements(); + + if (Probes::objectResizeActive()) + Probes::resizeObject(cx, this, oldSize, slotsAndStructSize()); +} + +#ifdef DEBUG +bool +JSObject::slotInRange(uintN slot, SentinelAllowed sentinel) const +{ + size_t capacity = numFixedSlots() + numDynamicSlots(); + if (sentinel == SENTINEL_ALLOWED) + return slot <= capacity; + return slot < capacity; +} +#endif /* DEBUG */ + static JSObject * js_InitNullClass(JSContext *cx, JSObject *obj) { @@ -4626,24 +4874,21 @@ SetProto(JSContext *cx, JSObject *obj, JSObject *proto, bool checkForCycles) JS_ASSERT_IF(!checkForCycles, obj != proto); JS_ASSERT(obj->isExtensible()); - if (obj->isNative()) { - if (!obj->ensureClassReservedSlots(cx)) - return false; - } - if (proto && proto->isXML()) { JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_XML_PROTO_FORBIDDEN); return false; } /* - * Regenerate property cache shape ids for all of the scopes along the - * old prototype chain to invalidate their property cache entries, in - * case any entries were filled by looking up through obj. + * Regenerate shapes for all of the scopes along the old prototype chain, + * in case any entries were filled by looking up through obj. Stop when an + * non-native object is found, prototype lookups will not be cached across + * these. */ JSObject *oldproto = obj; while (oldproto && oldproto->isNative()) { - oldproto->protoShapeChange(cx); + if (!oldproto->setUncacheableProto(cx)) + return false; oldproto = oldproto->getProto(); } @@ -4668,9 +4913,12 @@ SetProto(JSContext *cx, JSObject *obj, JSObject *proto, bool checkForCycles) return true; } + if (proto && !proto->setNewTypeUnknown(cx)) + return false; + TypeObject *type = proto - ? proto->getNewType(cx, NULL, /* markUnknown = */ true) - : &emptyTypeObject; + ? proto->getNewType(cx, NULL) + : cx->compartment->getEmptyType(cx); if (!type) return false; @@ -4747,11 +4995,7 @@ js_FindClassObject(JSContext *cx, JSObject *start, JSProtoKey protoKey, start = &fp->scopeChain(); if (start) { - /* Find the topmost object in the scope chain. */ - do { - obj = start; - start = obj->getParent(); - } while (start); + obj = start->getGlobal(); } else { obj = cx->globalObject; if (!obj) { @@ -4787,8 +5031,8 @@ js_FindClassObject(JSContext *cx, JSObject *start, JSProtoKey protoKey, Value v = UndefinedValue(); if (prop && pobj->isNative()) { shape = (Shape *) prop; - if (pobj->containsSlot(shape->slot)) { - v = pobj->nativeGetSlot(shape->slot); + if (pobj->containsSlot(shape->slot())) { + v = pobj->nativeGetSlot(shape->slot()); if (v.isPrimitive()) v.setUndefined(); } @@ -4831,11 +5075,10 @@ js_ConstructObject(JSContext *cx, Class *clasp, JSObject *proto, JSObject *paren proto = rval.toObjectOrNull(); } - JSObject *obj = NewObject(cx, clasp, proto, parent); + JSObject *obj = NewObjectWithClassProto(cx, clasp, proto, parent); if (!obj) return NULL; - obj->syncSpecialEquality(); MarkTypeObjectUnknownProperties(cx, obj->type()); Value rval; @@ -4865,12 +5108,12 @@ JSObject::allocSlot(JSContext *cx, uint32 *slotp) JS_ASSERT(slot >= JSSLOT_FREE(getClass())); /* - * If this object is in dictionary mode and it has a property table, try to - * pull a free slot from the property table's slot-number freelist. + * If this object is in dictionary mode, try to pull a free slot from the + * property table's slot-number freelist. 
*/ - if (inDictionaryMode() && lastProp->hasTable()) { - PropertyTable *table = lastProp->getTable(); - uint32 last = table->freelist; + if (inDictionaryMode()) { + PropertyTable &table = lastProperty()->table(); + uint32 last = table.freelist; if (last != SHAPE_INVALID_SLOT) { #ifdef DEBUG JS_ASSERT(last < slot); @@ -4881,51 +5124,51 @@ JSObject::allocSlot(JSContext *cx, uint32 *slotp) *slotp = last; const Value &vref = getSlot(last); - table->freelist = vref.toPrivateUint32(); + table.freelist = vref.toPrivateUint32(); setSlot(last, UndefinedValue()); return true; } } - if (slot >= numSlots() && !growSlots(cx, slot + 1)) + if (slot >= SHAPE_MAXIMUM_SLOT) { + js_ReportOutOfMemory(cx); + return false; + } + + *slotp = slot; + + if (inDictionaryMode() && !setSlotSpan(cx, slot + 1)) return false; - /* JSObject::growSlots or JSObject::freeSlot should set the free slots to void. */ - JS_ASSERT(getSlot(slot).isUndefined()); - *slotp = slot; return true; } -bool +void JSObject::freeSlot(JSContext *cx, uint32 slot) { - uint32 limit = slotSpan(); - JS_ASSERT(slot < limit); + JS_ASSERT(slot < slotSpan()); - if (inDictionaryMode() && lastProp->hasTable()) { - uint32 &last = lastProp->getTable()->freelist; + if (inDictionaryMode()) { + uint32 &last = lastProperty()->table().freelist; /* Can't afford to check the whole freelist, but let's check the head. */ - JS_ASSERT_IF(last != SHAPE_INVALID_SLOT, last < limit && last != slot); + JS_ASSERT_IF(last != SHAPE_INVALID_SLOT, last < slotSpan() && last != slot); /* - * Freeing a slot other than the last one mapped by this object's - * shape (and not a reserved slot; see bug 595230): push the slot onto - * the dictionary property table's freelist. We want to let the last - * slot be freed by shrinking the dslots vector; see js_TraceObject. + * Place all freed slots other than reserved slots (bug 595230) on the + * dictionary's free list. */ - if (JSSLOT_FREE(getClass()) <= slot && slot + 1 < limit) { + if (JSSLOT_FREE(getClass()) <= slot) { JS_ASSERT_IF(last != SHAPE_INVALID_SLOT, last < slotSpan()); setSlot(slot, PrivateUint32Value(last)); last = slot; - return true; + return; } } setSlot(slot, UndefinedValue()); - return false; } -static JSBool +static bool PurgeProtoChain(JSContext *cx, JSObject *obj, jsid id) { const Shape *shape; @@ -4938,15 +5181,19 @@ PurgeProtoChain(JSContext *cx, JSObject *obj, jsid id) shape = obj->nativeLookup(cx, id); if (shape) { PCMETER(JS_PROPERTY_CACHE(cx).pcpurges++); + if (!obj->shadowingShapeChange(cx, *shape)) + return false; + obj->shadowingShapeChange(cx, *shape); - return JS_TRUE; + return true; } obj = obj->getProto(); } - return JS_FALSE; + + return true; } -void +bool js_PurgeScopeChainHelper(JSContext *cx, JSObject *obj, jsid id) { JS_ASSERT(obj->isDelegate()); @@ -4959,14 +5206,16 @@ js_PurgeScopeChainHelper(JSContext *cx, JSObject *obj, jsid id) * may gain such properties via eval introducing new vars; see bug 490364. 
*/ if (obj->isCall()) { - while ((obj = obj->getParent()) != NULL) { - if (PurgeProtoChain(cx, obj, id)) - break; + while ((obj = obj->scopeChain()) != NULL) { + if (!PurgeProtoChain(cx, obj, id)) + return false; } } + + return true; } -const Shape * +Shape * js_AddNativeProperty(JSContext *cx, JSObject *obj, jsid id, PropertyOp getter, StrictPropertyOp setter, uint32 slot, uintN attrs, uintN flags, intN shortid) @@ -4981,40 +5230,27 @@ js_AddNativeProperty(JSContext *cx, JSObject *obj, jsid id, * this optimistically (assuming no failure below) before locking obj, so * we can lock the shadowed scope. */ - js_PurgeScopeChain(cx, obj, id); - - if (!obj->ensureClassReservedSlots(cx)) + if (!js_PurgeScopeChain(cx, obj, id)) return NULL; return obj->putProperty(cx, id, getter, setter, slot, attrs, flags, shortid); } -const Shape * +Shape * js_ChangeNativePropertyAttrs(JSContext *cx, JSObject *obj, - const Shape *shape, uintN attrs, uintN mask, + Shape *shape, uintN attrs, uintN mask, PropertyOp getter, StrictPropertyOp setter) { - if (!obj->ensureClassReservedSlots(cx)) - return NULL; - /* * Check for freezing an object with shape-memoized methods here, on a - * shape-by-shape basis. Note that getter may be a pun of the method's - * joined function object value, to indicate "no getter change". In this - * case we must null getter to get the desired JS_PropertyStub behavior. + * shape-by-shape basis. */ if ((attrs & JSPROP_READONLY) && shape->isMethod()) { - JSObject *funobj = &shape->methodObject(); - Value v = ObjectValue(*funobj); + Value v = ObjectValue(*obj->nativeGetMethod(shape)); shape = obj->methodReadBarrier(cx, *shape, &v); if (!shape) return NULL; - - if (CastAsObject(getter) == funobj) { - JS_ASSERT(!(attrs & JSPROP_GETTER)); - getter = NULL; - } } return obj->changeProperty(cx, shape, attrs, mask, getter, setter); @@ -5049,10 +5285,10 @@ CallAddPropertyHook(JSContext *cx, Class *clasp, JSObject *obj, const Shape *sha if (clasp->addProperty != JS_PropertyStub) { Value nominal = *vp; - if (!CallJSPropertyOp(cx, clasp->addProperty, obj, shape->propid, vp)) + if (!CallJSPropertyOp(cx, clasp->addProperty, obj, shape->propid(), vp)) return false; if (*vp != nominal) { - if (obj->containsSlot(shape->slot)) + if (obj->containsSlot(shape->slot())) obj->nativeSetSlotWithType(cx, shape, *vp); } } @@ -5077,7 +5313,7 @@ DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, const Value &value, * update the attributes and property ops. A getter or setter is really * only half of a property. */ - const Shape *shape = NULL; + Shape *shape = NULL; if (attrs & (JSPROP_GETTER | JSPROP_SETTER)) { JSObject *pobj; JSProperty *prop; @@ -5094,7 +5330,7 @@ DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, const Value &value, if (!js_LookupProperty(cx, obj, id, &pobj, &prop)) return NULL; if (prop && pobj == obj) { - shape = (const Shape *) prop; + shape = (Shape *) prop; if (shape->isAccessorDescriptor()) { shape = obj->changeProperty(cx, shape, attrs, JSPROP_GETTER | JSPROP_SETTER, @@ -5117,16 +5353,10 @@ DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, const Value &value, * to be shadowed in obj's scope chain unless it is known a priori that it * is not possible. We do this before locking obj to avoid nesting locks. */ - if (!(defineHow & DNP_DONT_PURGE)) - js_PurgeScopeChain(cx, obj, id); - - /* - * Check whether a readonly property or setter is being defined on a known - * prototype object. See the comment in jscntxt.h before protoHazardShape's - * member declaration. 
- */ - if (obj->isDelegate() && (attrs & (JSPROP_READONLY | JSPROP_SETTER))) - cx->runtime->protoHazardShape = js_GenerateShape(cx); + if (!(defineHow & DNP_DONT_PURGE)) { + if (!js_PurgeScopeChain(cx, obj, id)) + return NULL; + } /* Use the object's class getter and setter by default. */ Class *clasp = obj->getClass(); @@ -5148,10 +5378,6 @@ DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, const Value &value, MarkTypePropertyConfigured(cx, obj, id); } - /* Get obj's own scope if it has one, or create a new one for obj. */ - if (!obj->ensureClassReservedSlots(cx)) - return NULL; - /* * Make a local copy of value, in case a method barrier needs to update the * value to define, and just so addProperty can mutate its inout parameter. @@ -5168,29 +5394,19 @@ DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, const Value &value, JS_ASSERT(!getter && !setter); JSObject *funobj = &value.toObject(); - if (funobj->getFunctionPrivate() == funobj) { + if (!funobj->toFunction()->isClonedMethod()) flags |= Shape::METHOD; - getter = CastAsPropertyOp(funobj); - } } if (const Shape *existingShape = obj->nativeLookup(cx, id)) { if (existingShape->isMethod() && - ObjectValue(existingShape->methodObject()) == valueCopy) + ObjectValue(*obj->nativeGetMethod(existingShape)) == valueCopy) { /* * Redefining an existing shape-memoized method object without * changing the property's value, perhaps to change attributes. * Clone now via the method read barrier. - * - * But first, assert that our caller is not trying to preserve - * the joined function object value as the getter object for - * the redefined property. The joined function object cannot - * yet have leaked, so only an internal code path could attempt - * such a thing. Any such path would be a bug to fix. */ - JS_ASSERT(existingShape->getter() != getter); - if (!obj->methodReadBarrier(cx, *existingShape, &valueCopy)) return NULL; } @@ -5198,32 +5414,15 @@ DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, const Value &value, adding = true; } - uint32 oldShape = obj->shape(); shape = obj->putProperty(cx, id, getter, setter, SHAPE_INVALID_SLOT, attrs, flags, shortid); if (!shape) return NULL; - - /* - * If shape is a joined method, the above call to putProperty suffices - * to update the object's shape id if need be (because the shape's hash - * identity includes the method value). - * - * But if scope->branded(), the object's shape id may not have changed - * and we may be overwriting a cached function-valued property (note - * how methodWriteBarrier checks previous vs. would-be current value). - * See bug 560998. - */ - if (obj->shape() == oldShape && obj->branded() && shape->slot != SHAPE_INVALID_SLOT) { - DebugOnly newshape = - obj->methodWriteBarrier(cx, *shape, valueCopy); - JS_ASSERT(newshape == shape); - } } /* Store valueCopy before calling addProperty, in case the latter GC's. 
*/ - if (obj->containsSlot(shape->slot)) - obj->nativeSetSlot(shape->slot, valueCopy); + if (shape->hasSlot() && obj->containsSlot(shape->slot())) + obj->nativeSetSlot(shape->slot(), valueCopy); /* XXXbe called with lock held */ if (!CallAddPropertyHook(cx, clasp, obj, shape, &valueCopy)) { @@ -5231,10 +5430,6 @@ DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, const Value &value, return NULL; } - if (defineHow & DNP_CACHE_RESULT) { - if (adding) - JS_PROPERTY_CACHE(cx).fill(cx, obj, 0, obj, shape, true); - } return shape; } @@ -5449,7 +5644,7 @@ js_FindPropertyHelper(JSContext *cx, jsid id, bool cacheResult, bool global, /* Scan entries on the scope chain that we can cache across. */ entry = JS_NO_PROP_CACHE_FILL; obj = scopeChain; - parent = obj->getParent(); + parent = obj->scopeChain(); for (scopeIndex = 0; parent ? IsCacheableNonGlobalScope(obj) @@ -5495,7 +5690,7 @@ js_FindPropertyHelper(JSContext *cx, jsid id, bool cacheResult, bool global, goto out; } obj = parent; - parent = obj->getParent(); + parent = obj->scopeChain(); } for (;;) { @@ -5510,7 +5705,7 @@ js_FindPropertyHelper(JSContext *cx, jsid id, bool cacheResult, bool global, * We conservatively assume that a resolve hook could mutate the scope * chain during JSObject::lookupGeneric. So we read parent here again. */ - parent = obj->getParent(); + parent = obj->scopeChain(); if (!parent) { pobj = NULL; break; @@ -5544,7 +5739,7 @@ js_FindIdentifierBase(JSContext *cx, JSObject *scopeChain, jsid id) * This function should not be called for a global object or from the * trace and should have a valid cache entry for native scopeChain. */ - JS_ASSERT(scopeChain->getParent()); + JS_ASSERT(scopeChain->scopeChain() != NULL); JSObject *obj = scopeChain; @@ -5558,7 +5753,7 @@ js_FindIdentifierBase(JSContext *cx, JSObject *scopeChain, jsid id) * must not be passed a global object (i.e. one with null parent). */ for (int scopeIndex = 0; - !obj->getParent() || IsCacheableNonGlobalScope(obj); + obj->isGlobal() || IsCacheableNonGlobalScope(obj); scopeIndex++) { JSObject *pobj; JSProperty *prop; @@ -5566,17 +5761,17 @@ js_FindIdentifierBase(JSContext *cx, JSObject *scopeChain, jsid id) return NULL; if (prop) { if (!pobj->isNative()) { - JS_ASSERT(!obj->getParent()); + JS_ASSERT(obj->isGlobal()); return obj; } - JS_ASSERT_IF(obj->getParent(), pobj->getClass() == obj->getClass()); + JS_ASSERT_IF(obj->isInternalScope(), pobj->getClass() == obj->getClass()); DebugOnly entry = JS_PROPERTY_CACHE(cx).fill(cx, scopeChain, scopeIndex, pobj, (Shape *) prop); JS_ASSERT(entry); return obj; } - JSObject *parent = obj->getParent(); + JSObject *parent = obj->scopeChain(); if (!parent) return obj; obj = parent; @@ -5596,11 +5791,11 @@ js_FindIdentifierBase(JSContext *cx, JSObject *scopeChain, jsid id) * chain during JSObject::lookupGeneric. So we must check if parent is * not null here even if it wasn't before the lookup. 
*/ - JSObject *parent = obj->getParent(); + JSObject *parent = obj->scopeChain(); if (!parent) break; obj = parent; - } while (obj->getParent()); + } while (!obj->isGlobal()); return obj; } @@ -5608,27 +5803,23 @@ static JS_ALWAYS_INLINE JSBool js_NativeGetInline(JSContext *cx, JSObject *receiver, JSObject *obj, JSObject *pobj, const Shape *shape, uintN getHow, Value *vp) { - uint32 slot; int32 sample; JS_ASSERT(pobj->isNative()); - slot = shape->slot; - if (slot != SHAPE_INVALID_SLOT) { - *vp = pobj->nativeGetSlot(slot); + if (shape->hasSlot()) { + *vp = pobj->nativeGetSlot(shape->slot()); JS_ASSERT(!vp->isMagic()); JS_ASSERT_IF(!pobj->hasSingletonType() && shape->hasDefaultGetterOrIsMethod(), - js::types::TypeHasProperty(cx, pobj->type(), shape->propid, *vp)); + js::types::TypeHasProperty(cx, pobj->type(), shape->propid(), *vp)); } else { vp->setUndefined(); } if (shape->hasDefaultGetter()) return true; - if (JS_UNLIKELY(shape->isMethod()) && (getHow & JSGET_NO_METHOD_BARRIER)) { - JS_ASSERT(shape->methodObject() == vp->toObject()); + if (JS_UNLIKELY(shape->isMethod()) && (getHow & JSGET_NO_METHOD_BARRIER)) return true; - } jsbytecode *pc; JSScript *script = cx->stack.currentScript(&pc); @@ -5642,12 +5833,11 @@ js_NativeGetInline(JSContext *cx, JSObject *receiver, JSObject *obj, JSObject *p if (!shape->get(cx, receiver, obj, pobj, vp)) return false; - if (pobj->containsSlot(slot) && - (JS_LIKELY(cx->runtime->propertyRemovals == sample) || - pobj->nativeContains(cx, *shape))) { - if (!pobj->methodWriteBarrier(cx, *shape, *vp)) - return false; - pobj->nativeSetSlot(slot, *vp); + /* Update slotful shapes according to the value produced by the getter. */ + if (shape->hasSlot() && pobj->nativeContains(cx, *shape)) { + /* Method shapes were removed by methodReadBarrier under shape->get(). */ + JS_ASSERT(!shape->isMethod()); + pobj->nativeSetSlot(shape->slot(), *vp); } return true; @@ -5663,22 +5853,18 @@ js_NativeGet(JSContext *cx, JSObject *obj, JSObject *pobj, const Shape *shape, u JSBool js_NativeSet(JSContext *cx, JSObject *obj, const Shape *shape, bool added, bool strict, Value *vp) { - AddTypePropertyId(cx, obj, shape->propid, *vp); - - uint32 slot; - int32 sample; + AddTypePropertyId(cx, obj, shape->propid(), *vp); JS_ASSERT(obj->isNative()); - slot = shape->slot; - if (slot != SHAPE_INVALID_SLOT) { + if (shape->hasSlot()) { + uint32 slot = shape->slot(); JS_ASSERT(obj->containsSlot(slot)); - /* If shape has a stub setter, keep obj locked and just store *vp. */ + /* If shape has a stub setter, just store *vp. */ if (shape->hasDefaultSetter()) { if (!added) { - /* FIXME: This should pass *shape, not slot, but see bug 630354. */ - if (!obj->methodWriteBarrier(cx, slot, *vp)) + if (shape->isMethod() && !obj->methodShapeChange(cx, *shape)) return false; } obj->nativeSetSlot(slot, *vp); @@ -5695,21 +5881,18 @@ js_NativeSet(JSContext *cx, JSObject *obj, const Shape *shape, bool added, bool return js_ReportGetterOnlyAssignment(cx); } - sample = cx->runtime->propertyRemovals; + int32 sample = cx->runtime->propertyRemovals; if (!shape->set(cx, obj, strict, vp)) return false; - - JS_ASSERT_IF(!obj->inDictionaryMode(), shape->slot == slot); - slot = shape->slot; - if (obj->containsSlot(slot) && + /* + * Update any slot for the shape with the value produced by the setter, + * unless the setter deleted the shape. 
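+     * The propertyRemovals counter sampled before shape->set() serves as a
+     * cheap check here: if no property has been removed anywhere in the
+     * runtime since the sample was taken, the shape cannot have been
+     * deleted; otherwise nativeContains() re-checks that it is still
+     * present.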
+ */ + if (shape->hasSlot() && (JS_LIKELY(cx->runtime->propertyRemovals == sample) || obj->nativeContains(cx, *shape))) { - if (!added) { - if (!obj->methodWriteBarrier(cx, *shape, *vp)) - return false; - } - obj->setSlot(slot, *vp); + obj->setSlot(shape->slot(), *vp); } return true; @@ -5929,18 +6112,14 @@ JSObject::callMethod(JSContext *cx, jsid id, uintN argc, Value *argv, Value *vp) static bool CloneFunctionForSetMethod(JSContext *cx, Value *vp) { - JSObject *funobj = &vp->toObject(); - JSFunction *fun = funobj->getFunctionPrivate(); + JSFunction *fun = vp->toObject().toFunction(); - /* - * If fun is already different from the original JSFunction, it does not - * need to be cloned again. - */ - if (fun == funobj) { - funobj = CloneFunctionObject(cx, fun); - if (!funobj) + /* Clone the fun unless it already has been. */ + if (!fun->isClonedMethod()) { + fun = CloneFunctionObject(cx, fun); + if (!fun) return false; - vp->setObject(*funobj); + vp->setObject(*fun); } return true; } @@ -6003,7 +6182,7 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, /* We should never add properties to lexical blocks. */ JS_ASSERT(!obj->isBlock()); - if (!obj->getParent() && + if (obj->isGlobal() && (defineHow & DNP_UNQUALIFIED) && !js::CheckUndeclaredVarAssignment(cx, JSID_TO_STRING(id))) { return JS_FALSE; @@ -6082,7 +6261,7 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, defineHow &= ~DNP_SET_METHOD; if (shape->hasShortID()) { flags = Shape::HAS_SHORTID; - shortid = shape->shortid; + shortid = shape->shortid(); } attrs &= ~JSPROP_SHARED; getter = shape->getter(); @@ -6099,9 +6278,6 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, shape = NULL; } - JS_ASSERT_IF(shape && shape->isMethod(), pobj->hasMethodBarrier()); - JS_ASSERT_IF(shape && shape->isMethod(), - pobj->getSlot(shape->slot).toObject() == shape->methodObject()); if (shape && (defineHow & DNP_SET_METHOD)) { /* * JSOP_SETMETHOD is assigning to an existing own property. If it @@ -6110,15 +6286,16 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, * cache, as the interpreter has no fast path for these unusual * cases. */ - bool identical = shape->isMethod() && shape->methodObject() == vp->toObject(); - if (!identical) { + if (shape->isMethod()) { + if (obj->nativeGetMethod(shape) == &vp->toObject()) + return true; shape = obj->methodShapeChange(cx, *shape); if (!shape) return false; - if (!CloneFunctionForSetMethod(cx, vp)) - return false; } - return identical || js_NativeSet(cx, obj, shape, false, strict, vp); + if (!CloneFunctionForSetMethod(cx, vp)) + return false; + return js_NativeSet(cx, obj, shape, false, strict, vp); } } @@ -6137,10 +6314,7 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, * Purge the property cache of now-shadowed id in obj's scope chain. * Do this early, before locking obj to avoid nesting locks. */ - js_PurgeScopeChain(cx, obj, id); - - /* Find or make a property descriptor with the right heritage. 
*/ - if (!obj->ensureClassReservedSlots(cx)) + if (!js_PurgeScopeChain(cx, obj, id)) return JS_FALSE; /* @@ -6152,11 +6326,8 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, JS_ASSERT(!(attrs & (JSPROP_GETTER | JSPROP_SETTER))); JSObject *funobj = &vp->toObject(); - JSFunction *fun = funobj->getFunctionPrivate(); - if (fun == funobj) { + if (!funobj->toFunction()->isClonedMethod()) flags |= Shape::METHOD; - getter = CastAsPropertyOp(funobj); - } } shape = obj->putProperty(cx, id, getter, setter, SHAPE_INVALID_SLOT, @@ -6169,8 +6340,8 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, * Note that we store before calling addProperty, to match the order * in DefineNativeProperty. */ - if (obj->containsSlot(shape->slot)) - obj->nativeSetSlot(shape->slot, UndefinedValue()); + if (obj->containsSlot(shape->slot())) + obj->nativeSetSlot(shape->slot(), UndefinedValue()); /* XXXbe called with obj locked */ if (!CallAddPropertyHook(cx, clasp, obj, shape, vp)) { @@ -6180,8 +6351,8 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, added = true; } - if (defineHow & DNP_CACHE_RESULT) - JS_PROPERTY_CACHE(cx).fill(cx, obj, 0, obj, shape, added); + if ((defineHow & DNP_CACHE_RESULT) && !added) + JS_PROPERTY_CACHE(cx).fill(cx, obj, 0, obj, shape); return js_NativeSet(cx, obj, shape, added, strict, vp); } @@ -6296,13 +6467,13 @@ js_DeleteProperty(JSContext *cx, JSObject *obj, jsid id, Value *rval, JSBool str return true; } - if (!CallJSPropertyOp(cx, obj->getClass()->delProperty, obj, SHAPE_USERID(shape), rval)) + if (!CallJSPropertyOp(cx, obj->getClass()->delProperty, obj, shape->getUserId(), rval)) return false; if (rval->isFalse()) return true; - if (obj->containsSlot(shape->slot)) { - const Value &v = obj->nativeGetSlot(shape->slot); + if (shape->hasSlot() && obj->containsSlot(shape->slot())) { + const Value &v = obj->nativeGetSlot(shape->slot()); GCPoke(cx, v); /* @@ -6316,27 +6487,20 @@ js_DeleteProperty(JSContext *cx, JSObject *obj, jsid id, Value *rval, JSBool str * so the only way they could have the method's joined function object * as callee is through an API abusage. We break any such edge case. 
*/ - if (obj->hasMethodBarrier()) { - JSObject *funobj; - - if (IsFunctionObject(v, &funobj)) { - JSFunction *fun = funobj->getFunctionPrivate(); - - if (fun != funobj) { - for (StackFrame *fp = cx->maybefp(); fp; fp = fp->prev()) { - if (fp->isFunctionFrame() && - fp->callee() == fun->compiledFunObj() && - fp->thisValue().isObject()) - { - JSObject *tmp = &fp->thisValue().toObject(); - do { - if (tmp == obj) { - fp->overwriteCallee(*funobj); - break; - } - } while ((tmp = tmp->getProto()) != NULL); + JSFunction *fun; + if (IsFunctionObject(v, &fun) && fun->isClonedMethod()) { + for (StackFrame *fp = cx->maybefp(); fp; fp = fp->prev()) { + if (fp->isFunctionFrame() && + fp->fun()->script() == fun->script() && + fp->thisValue().isObject()) + { + JSObject *tmp = &fp->thisValue().toObject(); + do { + if (tmp == obj) { + fp->overwriteCallee(*fun); + break; } - } + } while ((tmp = tmp->getProto()) != NULL); } } } @@ -6360,8 +6524,9 @@ bool HasDataProperty(JSContext *cx, JSObject *obj, jsid methodid, Value *vp) { if (const Shape *shape = obj->nativeLookup(cx, methodid)) { - if (shape->hasDefaultGetterOrIsMethod() && obj->containsSlot(shape->slot)) { - *vp = obj->nativeGetSlot(shape->slot); + if (shape->hasDefaultGetterOrIsMethod() && shape->hasSlot()) { + JS_ASSERT(obj->containsSlot(shape->slot())); + *vp = obj->nativeGetSlot(shape->slot()); return true; } } @@ -6533,8 +6698,8 @@ CheckAccess(JSContext *cx, JSObject *obj, jsid id, JSAccessMode mode, shape = (Shape *)prop; *attrsp = shape->attributes(); if (!writing) { - if (pobj->containsSlot(shape->slot)) - *vp = pobj->nativeGetSlot(shape->slot); + if (shape->hasSlot()) + *vp = pobj->nativeGetSlot(shape->slot()); else vp->setUndefined(); } @@ -6803,9 +6968,9 @@ js_PrintObjectSlotName(JSTracer *trc, char *buf, size_t bufsize) const Shape *shape; if (obj->isNative()) { shape = obj->lastProperty(); - while (shape->previous() && shape->slot != slot) + while (shape->previous() && shape->maybeSlot() != slot) shape = shape->previous(); - if (shape->slot != slot) + if (shape->maybeSlot() != slot) shape = NULL; } else { shape = NULL; @@ -6825,7 +6990,7 @@ js_PrintObjectSlotName(JSTracer *trc, char *buf, size_t bufsize) else JS_snprintf(buf, bufsize, "**UNKNOWN SLOT %ld**", (long)slot); } else { - jsid propid = shape->propid; + jsid propid = shape->propid(); if (JSID_IS_INT(propid)) { JS_snprintf(buf, bufsize, "%ld", (long)JSID_TO_INT(propid)); } else if (JSID_IS_ATOM(propid)) { @@ -6853,7 +7018,7 @@ js_ClearNative(JSContext *cx, JSObject *obj) { /* Remove all configurable properties from obj. 
*/ while (const Shape *shape = LastConfigurableShape(obj)) { - if (!obj->removeProperty(cx, shape->propid)) + if (!obj->removeProperty(cx, shape->propid())) return false; } @@ -6863,8 +7028,8 @@ js_ClearNative(JSContext *cx, JSObject *obj) if (shape->isDataDescriptor() && shape->writable() && shape->hasDefaultSetter() && - obj->containsSlot(shape->slot)) { - obj->setSlot(shape->slot, UndefinedValue()); + obj->containsSlot(shape->slot())) { + obj->setSlot(shape->slot(), UndefinedValue()); } } return true; @@ -6878,10 +7043,8 @@ js_GetReservedSlot(JSContext *cx, JSObject *obj, uint32 slot, Value *vp) return true; } - if (slot < obj->numSlots()) - *vp = obj->getSlot(slot); - else - vp->setUndefined(); + JS_ASSERT(slot < JSSLOT_FREE(obj->getClass())); + *vp = obj->getSlot(slot); return true; } @@ -6891,28 +7054,15 @@ js_SetReservedSlot(JSContext *cx, JSObject *obj, uint32 slot, const Value &v) if (!obj->isNative()) return true; - Class *clasp = obj->getClass(); - - if (slot >= obj->numSlots()) { - uint32 nslots = JSSLOT_FREE(clasp); - JS_ASSERT(slot < nslots); - if (!obj->allocSlots(cx, nslots)) - return false; - } - + JS_ASSERT(slot < JSSLOT_FREE(obj->getClass())); obj->setSlot(slot, v); GCPoke(cx, NullValue()); return true; } -GlobalObject * -JSObject::getGlobal() const -{ - JSObject *obj = const_cast(this); - while (JSObject *parent = obj->getParent()) - obj = parent; - return obj->asGlobal(); -} +static ObjectElements emptyObjectHeader(0, 0); +HeapValue *js::emptyObjectElements = + (HeapValue *) (jsuword(&emptyObjectHeader) + sizeof(ObjectElements)); JSBool js_ReportGetterOnlyAssignment(JSContext *cx) @@ -7056,8 +7206,7 @@ dumpValue(const Value &v) else if (v.isString()) dumpString(v.toString()); else if (v.isObject() && v.toObject().isFunction()) { - JSObject *funobj = &v.toObject(); - JSFunction *fun = funobj->getFunctionPrivate(); + JSFunction *fun = v.toObject().toFunction(); if (fun->atom) { fputs("atom, 0); @@ -7069,7 +7218,7 @@ dumpValue(const Value &v) fprintf(stderr, " (%s:%u)", script->filename ? script->filename : "", script->lineno); } - fprintf(stderr, " at %p (JSFunction at %p)>", (void *) funobj, (void *) fun); + fprintf(stderr, " at %p>", (void *) fun); } else if (v.isObject()) { JSObject *obj = &v.toObject(); Class *clasp = obj->getClass(); @@ -7118,7 +7267,7 @@ js_DumpId(jsid id) static void DumpProperty(JSObject *obj, const Shape &shape) { - jsid id = shape.propid; + jsid id = shape.propid(); uint8 attrs = shape.attributes(); fprintf(stderr, " ((Shape *) %p) ", (void *) &shape); @@ -7126,7 +7275,7 @@ DumpProperty(JSObject *obj, const Shape &shape) if (attrs & JSPROP_READONLY) fprintf(stderr, "readonly "); if (attrs & JSPROP_PERMANENT) fprintf(stderr, "permanent "); if (attrs & JSPROP_SHARED) fprintf(stderr, "shared "); - if (shape.isMethod()) fprintf(stderr, "method=%p ", (void *) &shape.methodObject()); + if (shape.isMethod()) fprintf(stderr, "method "); if (shape.hasGetterValue()) fprintf(stderr, "getterValue=%p ", (void *) shape.getterObject()); @@ -7144,11 +7293,13 @@ DumpProperty(JSObject *obj, const Shape &shape) fprintf(stderr, "%d", (int) JSID_TO_INT(id)); else fprintf(stderr, "unknown jsid %p", (void *) JSID_BITS(id)); - fprintf(stderr, ": slot %d", shape.slot); - if (obj->containsSlot(shape.slot)) { + + uint32 slot = shape.hasSlot() ? 
shape.maybeSlot() : SHAPE_INVALID_SLOT; + fprintf(stderr, ": slot %d", slot); + if (obj->containsSlot(slot)) { fprintf(stderr, " = "); - dumpValue(obj->getSlot(shape.slot)); - } else if (shape.slot != SHAPE_INVALID_SLOT) { + dumpValue(obj->getSlot(slot)); + } else if (slot != SHAPE_INVALID_SLOT) { fprintf(stderr, " (INVALID!)"); } fprintf(stderr, "\n"); @@ -7162,30 +7313,17 @@ js_DumpObject(JSObject *obj) fprintf(stderr, "class %p %s\n", (void *)clasp, clasp->name); fprintf(stderr, "flags:"); - uint32 flags = obj->flags; - if (flags & JSObject::DELEGATE) fprintf(stderr, " delegate"); - if (flags & JSObject::SYSTEM) fprintf(stderr, " system"); - if (flags & JSObject::NOT_EXTENSIBLE) fprintf(stderr, " not_extensible"); - if (flags & JSObject::BRANDED) fprintf(stderr, " branded"); - if (flags & JSObject::GENERIC) fprintf(stderr, " generic"); - if (flags & JSObject::METHOD_BARRIER) fprintf(stderr, " method_barrier"); - if (flags & JSObject::INDEXED) fprintf(stderr, " indexed"); - if (flags & JSObject::OWN_SHAPE) fprintf(stderr, " own_shape"); - if (flags & JSObject::HAS_EQUALITY) fprintf(stderr, " has_equality"); + if (obj->isDelegate()) fprintf(stderr, " delegate"); + if (obj->isSystem()) fprintf(stderr, " system"); + if (!obj->isExtensible()) fprintf(stderr, " not_extensible"); + if (obj->isIndexed()) fprintf(stderr, " indexed"); - bool anyFlags = flags != 0; if (obj->isNative()) { - if (obj->inDictionaryMode()) { + if (obj->inDictionaryMode()) fprintf(stderr, " inDictionaryMode"); - anyFlags = true; - } - if (obj->hasPropertyTable()) { + if (obj->hasPropertyTable()) fprintf(stderr, " hasPropertyTable"); - anyFlags = true; - } } - if (!anyFlags) - fprintf(stderr, " none"); fprintf(stderr, "\n"); if (obj->isDenseArray()) { diff --git a/js/src/jsobj.h b/js/src/jsobj.h index 2b758eacbc45..d223d78c6977 100644 --- a/js/src/jsobj.h +++ b/js/src/jsobj.h @@ -237,11 +237,6 @@ typedef Vector PropDescArray; } /* namespace js */ -enum { - INVALID_SHAPE = 0x8fffffff, - SHAPELESS = 0xffffffff -}; - /* * On success, and if id was found, return true with *objp non-null and with a * property of *objp stored in *propp. If successful but id was not found, @@ -355,345 +350,276 @@ class StrictArgumentsObject; class StringObject; class RegExpObject; +/* + * Header structure for object element arrays. This structure is immediately + * followed by an array of elements, with the elements member in an object + * pointing to the beginning of that array (the end of this structure). + * See below for usage of this structure. + */ +class ObjectElements +{ + friend struct ::JSObject; + + /* Number of allocated slots. */ + uint32 capacity; + + /* + * Number of initialized elements. This is <= the capacity, and for arrays + * is <= the length. Memory for elements above the initialized length is + * uninitialized, but values between the initialized length and the proper + * length are conceptually holes. + */ + uint32 initializedLength; + + /* 'length' property of array objects, unused for other objects. */ + uint32 length; + + /* :XXX: bug 586842 store state about sparse slots. 
*/ + uint32 unused; + + void staticAsserts() { + JS_STATIC_ASSERT(sizeof(ObjectElements) == VALUES_PER_HEADER * sizeof(Value)); + } + + public: + + ObjectElements(uint32 capacity, uint32 length) + : capacity(capacity), initializedLength(0), length(length) + {} + + HeapValue * elements() { return (HeapValue *)(jsuword(this) + sizeof(ObjectElements)); } + static ObjectElements * fromElements(HeapValue *elems) { + return (ObjectElements *)(jsuword(elems) - sizeof(ObjectElements)); + } + + static int offsetOfCapacity() { + return (int)offsetof(ObjectElements, capacity) - (int)sizeof(ObjectElements); + } + static int offsetOfInitializedLength() { + return (int)offsetof(ObjectElements, initializedLength) - (int)sizeof(ObjectElements); + } + static int offsetOfLength() { + return (int)offsetof(ObjectElements, length) - (int)sizeof(ObjectElements); + } + + static const size_t VALUES_PER_HEADER = 2; +}; + +/* Shared singleton for objects with no elements. */ +extern HeapValue *emptyObjectElements; + } /* namespace js */ /* - * JSObject struct, with members sized to fit in 32 bytes on 32-bit targets, - * 64 bytes on 64-bit systems. The JSFunction struct is an extension of this - * struct allocated from a larger GC size-class. + * JSObject struct. The JSFunction struct is an extension of this struct + * allocated from a larger GC size-class. * - * The clasp member stores the js::Class pointer for this object. + * The lastProp member stores the shape of the object, which includes the + * object's class and the layout of all its properties. * * The type member stores the type of the object, which contains its prototype * object and the possible types of its properties. * - * An object is a delegate if it is on another object's prototype (type->proto - * field) or scope chain (the parent field), and therefore the delegate might - * be asked implicitly to get or set a property on behalf of another object. - * Delegates may be accessed directly too, as may any object, but only those - * objects linked after the head of any prototype or scope chain are flagged - * as delegates. This definition helps to optimize shape-based property cache - * invalidation (see Purge{Scope,Proto}Chain in jsobj.cpp). + * The rest of the object stores its named properties and indexed elements. + * These are stored separately from one another. Objects are followed by an + * variable-sized array of values for inline storage, which may be used by + * either properties of native objects (fixed slots) or by elements. * - * The meaning of the system object bit is defined by the API client. It is - * set in JS_NewSystemObject and is queried by JS_IsSystemObject (jsdbgapi.h), - * but it has no intrinsic meaning to SpiderMonkey. Further, JSFILENAME_SYSTEM - * and JS_FlagScriptFilenamePrefix (also exported via jsdbgapi.h) are intended - * to be complementary to this bit, but it is up to the API client to implement - * any such association. + * Two native objects with the same shape are guaranteed to have the same + * number of fixed slots. * - * Both these flag bits are initially zero; they may be set or queried using - * the (is|set)(Delegate|System) inline methods. + * Named property storage can be split between fixed slots and a dynamically + * allocated array (the slots member). For an object with N fixed slots, shapes + * with slots [0..N-1] are stored in the fixed slots, and the remainder are + * stored in the dynamic array. If all properties fit in the fixed slots, the + * 'slots' member is NULL. 
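+ *
+ * For example, in a hypothetical native object with four fixed slots and a
+ * slot span of six, slots 0..3 are read from the inline fixed slots and
+ * slots 4..5 from the dynamic 'slots' array; a slot access is roughly
+ *
+ *   size_t fixed = numFixedSlots();
+ *   const Value &v = slot < fixed ? fixedSlots()[slot] : slots[slot - fixed];
+ *
+ * mirroring getSlot() and getSlotAddressUnchecked() below.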
* - * Objects can have slots allocated either in a fixed array immediately - * following the object, in dynamically allocated slots, or both. In all cases, - * 'capacity' gives the number of usable slots. How the slots are organized - * is different for dense arrays vs. other objects. + * Elements are indexed via the 'elements' member. This member can point to + * either the shared emptyObjectElements singleton, into the inline value array + * (the address of the third value, to leave room for a ObjectElements header; + * in this case numFixedSlots() is zero) or to a dynamically allocated array. * - * For dense arrays (arrays with only normal integer properties), the 'slots' - * member points either to the fixed array or to a dynamic array, and in - * all cases is indexed by the associated property (e.g. obj->slots[5] stores - * the value for property '5'). If a dynamic array is in use, slots in the - * fixed array are not used. + * Only certain combinations of properties and elements storage are currently + * possible. This will be changing soon :XXX: bug 586842. * - * ArrayBuffer objects may also use their fixed slots for storage in a similar - * manner to dense arrays. The fixed slots do not represent Values in such - * cases. (ArrayBuffers never have other properties added directly to them, as - * they delegate such attempts to another JSObject). + * - For objects other than arrays and typed arrays, the elements are empty. * - * For objects other than dense arrays and array buffers, if the object has N - * fixed slots then those are always the first N slots of the object. The - * dynamic slots pointer is used if those fixed slots overflow, and stores all - * remaining slots. The dynamic slots pointer is NULL if there is no slots - * overflow, and never points to the object's fixed slots. Unlike dense arrays, - * the fixed slots can always be accessed. Two objects with the same shape are - * guaranteed to have the same number of fixed slots. + * - For 'slow' arrays, both elements and properties are used, but the + * elements have zero capacity --- only the length member is used. + * + * - For dense arrays, elements are used and properties are not used. + * + * - For typed array buffers, elements are used and properties are not used. + * The data indexed by the elements do not represent Values, but primitive + * unboxed integers or floating point values. */ -struct JSObject : js::gc::Cell { +struct JSObject : js::gc::Cell +{ + private: + friend struct js::Shape; + /* - * Private pointer to the last added property and methods to manipulate the - * list it links among properties in this scope. + * Shape of the object, encodes the layout of the object's properties and + * all other information about its structure. See jsscope.h. 
*/ - js::HeapPtrShape lastProp; - - private: - js::Class *clasp; - - protected: - inline void setLastProperty(const js::Shape *shape); - - private: - inline void removeLastProperty(); + js::HeapPtrShape shape_; #ifdef DEBUG void checkShapeConsistency(); #endif - public: - inline const js::Shape *lastProperty() const; - - inline js::Shape **nativeSearch(JSContext *cx, jsid id, bool adding = false); - inline const js::Shape *nativeLookup(JSContext *cx, jsid id); - - inline bool nativeContains(JSContext *cx, jsid id); - inline bool nativeContains(JSContext *cx, const js::Shape &shape); - - enum { - DELEGATE = 0x01, - SYSTEM = 0x02, - NOT_EXTENSIBLE = 0x04, - BRANDED = 0x08, - GENERIC = 0x10, - METHOD_BARRIER = 0x20, - INDEXED = 0x40, - OWN_SHAPE = 0x80, - METHOD_THRASH_COUNT_MASK = 0x300, - METHOD_THRASH_COUNT_SHIFT = 8, - METHOD_THRASH_COUNT_MAX = METHOD_THRASH_COUNT_MASK >> METHOD_THRASH_COUNT_SHIFT, - BOUND_FUNCTION = 0x400, - HAS_EQUALITY = 0x800, - VAROBJ = 0x1000, - WATCHED = 0x2000, - PACKED_ARRAY = 0x4000, - ITERATED = 0x8000, - SINGLETON_TYPE = 0x10000, - LAZY_TYPE = 0x20000, - - /* The top 5 bits of an object's flags are its number of fixed slots. */ - FIXED_SLOTS_SHIFT = 27, - FIXED_SLOTS_MASK = 0x1f << FIXED_SLOTS_SHIFT, - - UNUSED_FLAG_BITS = 0x07FC0000 - }; - - /* - * Impose a sane upper bound, originally checked only for dense arrays, on - * number of slots in an object. - */ - enum { - NSLOTS_BITS = 29, - NSLOTS_LIMIT = JS_BIT(NSLOTS_BITS) - }; - - uint32 flags; /* flags */ - uint32 objShape; /* copy of lastProp->shape, or override if different */ - - /* - * If prototype, type of values using this as their prototype. If a dense - * array, this holds the initialized length (see jsarray.cpp). - */ - js::HeapPtr newType; - - jsuword &initializedLength() { return *newType.unsafeGetUnioned(); } - - JS_FRIEND_API(size_t) sizeOfSlotsArray(JSMallocSizeOfFun mallocSizeOf); - - js::HeapPtrObject parent; /* object's parent */ - void *privateData; /* private data */ - jsuword capacity; /* total number of available slots */ - - private: - js::HeapValue *slots; /* dynamically allocated slots, - or pointer to fixedSlots() for - dense arrays. */ - /* * The object's type and prototype. For objects with the LAZY_TYPE flag * set, this is the prototype's default 'new' type and can only be used * to get that prototype. */ - js::HeapPtr type_; + js::HeapPtrTypeObject type_; /* Make the type object to use for LAZY_TYPE objects. */ void makeLazyType(JSContext *cx); public: + inline js::Shape *lastProperty() const { + JS_ASSERT(shape_); + return shape_; + } + + /* + * Update the last property, keeping the number of allocated slots in sync + * with the object's new slot span. + */ + bool setLastProperty(JSContext *cx, const js::Shape *shape); + + /* As above, but does not change the slot span. */ + inline void setLastPropertyInfallible(const js::Shape *shape); + + /* Make a non-array object with the specified initial state. */ + static inline JSObject *create(JSContext *cx, + js::gc::AllocKind kind, + js::Shape *shape, + js::types::TypeObject *type, + js::HeapValue *slots); + + /* Make a dense array object with the specified initial state. */ + static inline JSObject *createDenseArray(JSContext *cx, + js::gc::AllocKind kind, + js::Shape *shape, + js::types::TypeObject *type, + uint32 length); + + /* + * Remove the last property of an object, provided that it is safe to do so + * (the shape and previous shape do not carry conflicting information about + * the object itself). 
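+     * canRemoveLastProperty() below reports whether this condition holds.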
+ */ + inline void removeLastProperty(JSContext *cx); + inline bool canRemoveLastProperty(); + + /* + * Update the slot span directly for a dictionary object, and allocate + * slots to cover the new span if necessary. + */ + bool setSlotSpan(JSContext *cx, uint32 span); + + static inline size_t offsetOfShape() { return offsetof(JSObject, shape_); } + inline js::HeapPtrShape *addressOfShape() { return &shape_; } + + inline js::Shape **nativeSearch(JSContext *cx, jsid id, bool adding = false); + const js::Shape *nativeLookup(JSContext *cx, jsid id); + + inline bool nativeContains(JSContext *cx, jsid id); + inline bool nativeContains(JSContext *cx, const js::Shape &shape); + + /* Upper bound on the number of elements in an object. */ + static const uint32 NELEMENTS_LIMIT = JS_BIT(29); + + private: + js::HeapValue *slots; /* Slots for object properties. */ + js::HeapValue *elements; /* Slots for object elements. */ + + public: + inline bool isNative() const; - inline bool isNewborn() const; - void setClass(js::Class *c) { clasp = c; } - js::Class *getClass() const { return clasp; } - JSClass *getJSClass() const { return Jsvalify(clasp); } - - bool hasClass(const js::Class *c) const { - return c == clasp; - } - - const js::ObjectOps *getOps() const { - return &getClass()->ops; - } + inline js::Class *getClass() const; + inline JSClass *getJSClass() const; + inline bool hasClass(const js::Class *c) const; + inline const js::ObjectOps *getOps() const; inline void scanSlots(js::GCMarker *gcmarker); - uint32 shape() const { - JS_ASSERT(objShape != INVALID_SHAPE); - return objShape; - } + /* + * An object is a delegate if it is on another object's prototype or scope + * chain, and therefore the delegate might be asked implicitly to get or + * set a property on behalf of another object. Delegates may be accessed + * directly too, as may any object, but only those objects linked after the + * head of any prototype or scope chain are flagged as delegates. This + * definition helps to optimize shape-based property cache invalidation + * (see Purge{Scope,Proto}Chain in jsobj.cpp). + */ + inline bool isDelegate() const; + inline bool setDelegate(JSContext *cx); - bool isDelegate() const { return !!(flags & DELEGATE); } - void setDelegate() { flags |= DELEGATE; } - void clearDelegate() { flags &= ~DELEGATE; } - - bool isBoundFunction() const { return !!(flags & BOUND_FUNCTION); } - - static void setDelegateNullSafe(JSObject *obj) { - if (obj) - obj->setDelegate(); - } - - bool isSystem() const { return !!(flags & SYSTEM); } - void setSystem() { flags |= SYSTEM; } + inline bool isBoundFunction() const; /* - * A branded object contains plain old methods (function-valued properties - * without magic getters and setters), and its shape evolves whenever a - * function value changes. + * The meaning of the system object bit is defined by the API client. It is + * set in JS_NewSystemObject and is queried by JS_IsSystemObject, but it + * has no intrinsic meaning to SpiderMonkey. */ - bool branded() { return !!(flags & BRANDED); } + inline bool isSystem() const; + inline bool setSystem(JSContext *cx); + + inline bool hasSpecialEquality() const; + + inline bool watched() const; + inline bool setWatched(JSContext *cx); + + /* See StackFrame::varObj. */ + inline bool isVarObj() const; + inline bool setVarObj(JSContext *cx); /* - * NB: these return false on shape overflow but do not report any error. - * Callers who depend on shape guarantees should therefore bail off trace, - * e.g., on false returns. 
+ * Objects with an uncacheable proto can have their prototype mutated + * without inducing a shape change on the object. Property cache entries + * and JIT inline caches should not be filled for lookups across prototype + * lookups on the object. */ - bool brand(JSContext *cx); - bool unbrand(JSContext *cx); + inline bool hasUncacheableProto() const; + inline bool setUncacheableProto(JSContext *cx); - bool generic() { return !!(flags & GENERIC); } - void setGeneric() { flags |= GENERIC; } + bool generateOwnShape(JSContext *cx, js::Shape *newShape = NULL); - uintN getMethodThrashCount() const { - return (flags & METHOD_THRASH_COUNT_MASK) >> METHOD_THRASH_COUNT_SHIFT; - } - - void setMethodThrashCount(uintN count) { - JS_ASSERT(count <= METHOD_THRASH_COUNT_MAX); - flags = (flags & ~METHOD_THRASH_COUNT_MASK) | (count << METHOD_THRASH_COUNT_SHIFT); - } - - bool hasSpecialEquality() const { return !!(flags & HAS_EQUALITY); } - void assertSpecialEqualitySynced() const { - JS_ASSERT(!!clasp->ext.equality == hasSpecialEquality()); - } - - /* Sets an object's HAS_EQUALITY flag based on its clasp. */ - inline void syncSpecialEquality(); - - bool watched() const { return !!(flags & WATCHED); } - - void setWatched(JSContext *cx) { - if (!watched()) { - flags |= WATCHED; - generateOwnShape(cx); - } - } - - /* See StackFrame::varObj. */ - inline bool isVarObj() const { return flags & VAROBJ; } - inline void makeVarObj() { flags |= VAROBJ; } private: - void generateOwnShape(JSContext *cx); + enum GenerateShape { + GENERATE_NONE, + GENERATE_SHAPE + }; - inline void setOwnShape(uint32 s); - inline void clearOwnShape(); + bool setFlag(JSContext *cx, /*BaseShape::Flag*/ uint32 flag, + GenerateShape generateShape = GENERATE_NONE); public: inline bool nativeEmpty() const; - bool hasOwnShape() const { return !!(flags & OWN_SHAPE); } - - inline void initMap(js::Shape *amap); - inline void setMap(js::Shape *amap); - - inline void setSharedNonNativeMap(); - - /* Functions for setting up scope chain object maps and shapes. */ - void initCall(JSContext *cx, const js::Bindings &bindings, JSObject *parent); - void initClonedBlock(JSContext *cx, js::types::TypeObject *type, js::StackFrame *priv); - void setBlockOwnShape(JSContext *cx); - - void deletingShapeChange(JSContext *cx, const js::Shape &shape); - const js::Shape *methodShapeChange(JSContext *cx, const js::Shape &shape); - bool methodShapeChange(JSContext *cx, uint32 slot); - void protoShapeChange(JSContext *cx); - void shadowingShapeChange(JSContext *cx, const js::Shape &shape); - bool globalObjectOwnShapeChange(JSContext *cx); - - void extensibleShapeChange(JSContext *cx) { - /* This will do for now. */ - generateOwnShape(cx); - } - - /* - * A scope has a method barrier when some compiler-created "null closure" - * function objects (functions that do not use lexical bindings above their - * scope, only free variable names) that have a correct JSSLOT_PARENT value - * thanks to the COMPILE_N_GO optimization are stored as newly added direct - * property values of the scope's object. - * - * The de-facto standard JS language requires each evaluation of such a - * closure to result in a unique (according to === and observable effects) - * function object. ES3 tried to allow implementations to "join" such - * objects to a single compiler-created object, but this makes an overt - * mutation hazard, also an "identity hazard" against interoperation among - * implementations that join and do not join. 
- * - * To stay compatible with the de-facto standard, we store the compiler- - * created function object as the method value and set the METHOD_BARRIER - * flag. - * - * The method value is part of the method property tree node's identity, so - * it effectively brands the scope with a predictable shape corresponding - * to the method value, but without the overhead of setting the BRANDED - * flag, which requires assigning a new shape peculiar to each branded - * scope. Instead the shape is shared via the property tree among all the - * scopes referencing the method property tree node. - * - * Then when reading from a scope for which scope->hasMethodBarrier() is - * true, we count on the scope's qualified/guarded shape being unique and - * add a read barrier that clones the compiler-created function object on - * demand, reshaping the scope. - * - * This read barrier is bypassed when evaluating the callee sub-expression - * of a call expression (see the JOF_CALLOP opcodes in jsopcode.tbl), since - * such ops do not present an identity or mutation hazard. The compiler - * performs this optimization only for null closures that do not use their - * own name or equivalent built-in references (arguments.callee). - * - * The BRANDED write barrier, JSObject::methodWriteBarrer, must check for - * METHOD_BARRIER too, and regenerate this scope's shape if the method's - * value is in fact changing. - */ - bool hasMethodBarrier() { return !!(flags & METHOD_BARRIER); } - void setMethodBarrier() { flags |= METHOD_BARRIER; } - - /* - * Test whether this object may be branded due to method calls, which means - * any assignment to a function-valued property must regenerate shape; else - * test whether this object has method properties, which require a method - * write barrier. - */ - bool brandedOrHasMethodBarrier() { return !!(flags & (BRANDED | METHOD_BARRIER)); } + js::Shape *methodShapeChange(JSContext *cx, const js::Shape &shape); + bool shadowingShapeChange(JSContext *cx, const js::Shape &shape); /* * Read barrier to clone a joined function object stored as a method. * Defined in jsobjinlines.h, but not declared inline per standard style in * order to avoid gcc warnings. */ - const js::Shape *methodReadBarrier(JSContext *cx, const js::Shape &shape, js::Value *vp); + js::Shape *methodReadBarrier(JSContext *cx, const js::Shape &shape, js::Value *vp); - /* - * Write barrier to check for a change of method value. Defined inline in - * jsobjinlines.h after methodReadBarrier. The slot flavor is required by - * JSOP_*GVAR, which deals in slots not shapes, while not deoptimizing to - * map slot to shape unless JSObject::flags show that this is necessary. - * The methodShapeChange overload (above) parallels this. - */ - const js::Shape *methodWriteBarrier(JSContext *cx, const js::Shape &shape, const js::Value &v); - bool methodWriteBarrier(JSContext *cx, uint32 slot, const js::Value &v); + /* Whether method shapes can be added to this object. */ + inline bool canHaveMethodBarrier() const; - bool isIndexed() const { return !!(flags & INDEXED); } - void setIndexed() { flags |= INDEXED; } + inline bool isIndexed() const; + inline bool setIndexed(JSContext *cx); + + /* Set the indexed flag on this object if id is an indexed property. 
*/ + inline bool maybeSetIndexed(JSContext *cx, jsid id); /* * Return true if this object is a native one that has been converted from @@ -706,24 +632,19 @@ struct JSObject : js::gc::Cell { inline bool hasPropertyTable() const; - uint32 numSlots() const { return uint32(capacity); } - inline size_t structSize() const; inline size_t slotsAndStructSize() const; + inline size_t dynamicSlotSize(JSMallocSizeOfFun mallocSizeOf) const; - /* Slot accessors for JITs. */ + inline size_t numFixedSlots() const; - static inline size_t getFixedSlotOffset(size_t slot); - static inline size_t offsetOfCapacity() { return offsetof(JSObject, capacity); } - static inline size_t offsetOfSlots() { return offsetof(JSObject, slots); } - static inline size_t offsetOfInitializedLength() { return offsetof(JSObject, newType); } + static const uint32 MAX_FIXED_SLOTS = 16; - /* - * Get a raw pointer to the object's slots, or a slot of the object given - * a previous value for its since-reallocated dynamic slots. - */ - inline const js::HeapValue *getRawSlots(); - inline const js::HeapValue *getRawSlot(size_t slot, const js::HeapValue *slots); + private: + inline js::HeapValue* fixedSlots() const; + public: + + /* Accessors for properties. */ /* Whether a slot is at a fixed offset from this object. */ inline bool isFixedSlot(size_t slot); @@ -731,45 +652,54 @@ struct JSObject : js::gc::Cell { /* Index into the dynamic slots array to use for a dynamic slot. */ inline size_t dynamicSlotIndex(size_t slot); - inline size_t numFixedSlots() const; + /* Get a raw pointer to the object's properties. */ + inline const js::HeapValue *getRawSlots(); - /* Whether this object has any dynamic slots at all. */ - inline bool hasSlotsArray() const; + /* JIT Accessors */ + static inline size_t getFixedSlotOffset(size_t slot); + static inline size_t getPrivateDataOffset(size_t nfixed); + static inline size_t offsetOfSlots() { return offsetof(JSObject, slots); } - /* Get the number of dynamic slots required for a given capacity. */ - inline size_t numDynamicSlots(size_t capacity) const; + /* Minimum size for dynamically allocated slots. */ + static const uint32 SLOT_CAPACITY_MIN = 8; - private: - inline js::HeapValue *fixedSlots() const; + /* + * Grow or shrink slots immediately before changing the slot span. + * The number of allocated slots is not stored explicitly, and changes to + * the slots must track changes in the slot span. + */ + bool growSlots(JSContext *cx, uint32 oldCount, uint32 newCount); + void shrinkSlots(JSContext *cx, uint32 oldCount, uint32 newCount); + + bool hasDynamicSlots() const { return slots != NULL; } + + /* + * Get the number of dynamic slots to allocate to cover the properties in + * an object with the given number of fixed slots and slot span. The slot + * capacity is not stored explicitly, and the allocated size of the slot + * array is kept in sync with this count. + */ + static inline size_t dynamicSlotsCount(size_t nfixed, size_t span); + + /* Compute dynamicSlotsCount() for this object. */ + inline size_t numDynamicSlots() const; protected: inline bool hasContiguousSlots(size_t start, size_t count) const; + inline void initializeSlotRange(size_t start, size_t count); + inline void invalidateSlotRange(size_t start, size_t count); + + inline bool updateSlotsForSpan(JSContext *cx, size_t oldSpan, size_t newSpan); + public: - /* Minimum size for dynamically allocated slots. 
*/ - static const uint32 SLOT_CAPACITY_MIN = 8; - - bool allocSlots(JSContext *cx, size_t nslots); - bool growSlots(JSContext *cx, size_t nslots); - void shrinkSlots(JSContext *cx, size_t nslots); - - bool ensureSlots(JSContext *cx, size_t nslots) { - if (numSlots() < nslots) - return growSlots(cx, nslots); - return true; - } /* * Trigger the write barrier on a range of slots that will no longer be * reachable. */ inline void prepareSlotRangeForOverwrite(size_t start, size_t end); - - /* - * Fill a range of slots with holes or undefined, depending on whether this - * is a dense array. - */ - void clearSlotRange(size_t start, size_t length); + inline void prepareElementRangeForOverwrite(size_t start, size_t end); /* * Copy a flat array of slots to this object at a start slot. Caller must @@ -779,62 +709,51 @@ struct JSObject : js::gc::Cell { */ void copySlotRange(size_t start, const js::Value *vector, size_t length, bool valid); - /* - * Ensure that the object has at least JSCLASS_RESERVED_SLOTS(clasp) + - * nreserved slots. - * - * This method may be called only for native objects freshly created using - * NewObject or one of its variant where the new object will both (a) never - * escape to script and (b) never be extended with ad-hoc properties that - * would try to allocate higher slots without the fresh object first having - * its map set to a shape path that maps those slots. - * - * Block objects satisfy (a) and (b), as there is no evil eval-based way to - * add ad-hoc properties to a Block instance. Call objects satisfy (a) and - * (b) as well, because the compiler-created Shape path that covers args, - * vars, and upvars, stored in their callee function in u.i.names, becomes - * their initial map. - */ - bool ensureInstanceReservedSlots(JSContext *cx, size_t nreserved); - - /* - * NB: ensureClassReservedSlotsForEmptyObject asserts that nativeEmpty() - * Use ensureClassReservedSlots for any object, either empty or already - * extended with properties. - */ - bool ensureClassReservedSlotsForEmptyObject(JSContext *cx); - - inline bool ensureClassReservedSlots(JSContext *cx); - inline uint32 slotSpan() const; inline bool containsSlot(uint32 slot) const; void rollbackProperties(JSContext *cx, uint32 slotSpan); +#ifdef DEBUG + enum SentinelAllowed { + SENTINEL_NOT_ALLOWED, + SENTINEL_ALLOWED + }; + + /* + * Check that slot is in range for the object's allocated slots. + * If sentinelAllowed then slot may equal the slot capacity. + */ + bool slotInRange(uintN slot, SentinelAllowed sentinel = SENTINEL_NOT_ALLOWED) const; +#endif + + js::HeapValue *getSlotAddressUnchecked(uintN slot) { + size_t fixed = numFixedSlots(); + if (slot < fixed) + return fixedSlots() + slot; + return slots + (slot - fixed); + } + js::HeapValue *getSlotAddress(uintN slot) { /* * This can be used to get the address of the end of the slots for the * object, which may be necessary when fetching zero-length arrays of * slots (e.g. for callObjVarArray). 
*/ - JS_ASSERT(!isDenseArray()); - JS_ASSERT(slot <= capacity); - size_t fixed = numFixedSlots(); - if (slot < fixed) - return fixedSlots() + slot; - return slots + (slot - fixed); + JS_ASSERT(slotInRange(slot, SENTINEL_ALLOWED)); + return getSlotAddressUnchecked(slot); } js::HeapValue &getSlotRef(uintN slot) { - JS_ASSERT(slot < capacity); + JS_ASSERT(slotInRange(slot)); return *getSlotAddress(slot); } inline js::HeapValue &nativeGetSlotRef(uintN slot); const js::Value &getSlot(uintN slot) const { - JS_ASSERT(slot < capacity); + JS_ASSERT(slotInRange(slot)); size_t fixed = numFixedSlots(); if (slot < fixed) return fixedSlots()[slot]; @@ -842,6 +761,7 @@ struct JSObject : js::gc::Cell { } inline const js::Value &nativeGetSlot(uintN slot) const; + inline JSFunction *nativeGetMethod(const js::Shape *shape) const; inline void setSlot(uintN slot, const js::Value &value); inline void initSlot(uintN slot, const js::Value &value); @@ -871,24 +791,20 @@ struct JSObject : js::gc::Cell { inline void setFixedSlot(uintN slot, const js::Value &value); inline void initFixedSlot(uintN slot, const js::Value &value); - /* Defined in jsscopeinlines.h to avoid including implementation dependencies here. */ - inline void updateShape(JSContext *cx); - inline void updateFlags(const js::Shape *shape, bool isDefinitelyAtom = false); - /* Extend this object to have shape as its last-added property. */ - inline void extend(JSContext *cx, const js::Shape *shape, bool isDefinitelyAtom = false); + inline bool extend(JSContext *cx, const js::Shape *shape, bool isDefinitelyAtom = false); /* * Whether this is the only object which has its specified type. This * object will have its type constructed lazily as needed by analysis. */ - bool hasSingletonType() const { return flags & SINGLETON_TYPE; } + bool hasSingletonType() const { return !!type_->singleton; } /* * Whether the object's type has not been constructed yet. If an object * might have a lazy type, use getType() below, otherwise type(). */ - bool hasLazyType() const { return flags & LAZY_TYPE; } + bool hasLazyType() const { return type_->lazy(); } /* * Marks this object as having a singleton type, and leave the type lazy. @@ -896,9 +812,6 @@ struct JSObject : js::gc::Cell { */ inline bool setSingletonType(JSContext *cx); - /* Called from GC, reverts a singleton object to having a lazy type. */ - inline void revertLazyType(); - inline js::types::TypeObject *getType(JSContext *cx); js::types::TypeObject *type() const { @@ -912,17 +825,29 @@ struct JSObject : js::gc::Cell { } static inline size_t offsetOfType() { return offsetof(JSObject, type_); } + inline js::HeapPtrTypeObject *addressOfType() { return &type_; } - inline void clearType(); inline void setType(js::types::TypeObject *newType); - inline void initType(js::types::TypeObject *newType); - inline js::types::TypeObject *getNewType(JSContext *cx, JSFunction *fun = NULL, - bool markUnknown = false); - private: - void makeNewType(JSContext *cx, JSFunction *fun, bool markUnknown); + js::types::TypeObject *getNewType(JSContext *cx, JSFunction *fun = NULL); + +#ifdef DEBUG + bool hasNewType(js::types::TypeObject *newType); +#endif + + /* + * Mark an object that has been iterated over and is a singleton. We need + * to recover this information in the object's type information after it + * is purged on GC. + */ + inline bool setIteratedSingleton(JSContext *cx); + + /* + * Mark an object as requiring its default 'new' type to have unknown + * properties. 
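+     * SetProto, for example, calls this on the new prototype object before
+     * fetching its default 'new' type.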
+ */ + bool setNewTypeUnknown(JSContext *cx); - public: /* Set a new prototype for an object with a singleton type. */ bool splicePrototype(JSContext *cx, JSObject *proto); @@ -936,33 +861,77 @@ struct JSObject : js::gc::Cell { return type_->proto; } - JSObject *getParent() const { - return parent; - } + /* + * Parents and scope chains. + * + * All script-accessible objects with a NULL parent are global objects, + * and all global objects have a NULL parent. Some builtin objects which + * are not script-accessible also have a NULL parent, such as parser + * created functions for non-compileAndGo scripts. + * + * Except for the non-script-accessible builtins, the global with which an + * object is associated can be reached by following parent links to that + * global (see getGlobal()). + * + * The scope chain of an object is the link in the search path when a + * script does a name lookup on a scope object. For JS internal scope + * objects --- Call, Block, DeclEnv and With --- the chain is stored in + * the first fixed slot of the object, and the object's parent is the + * associated global. For other scope objects, the chain is stored in the + * object's parent. + * + * In compileAndGo code, scope chains can contain only internal scope + * objects with a global object at the root as the scope of the outermost + * non-function script. In non-compileAndGo code, the scope of the + * outermost non-function script might not be a global object, and can have + * a mix of other objects above it before the global object is reached. + */ - inline void clearParent(); - inline void setParent(JSObject *newParent); - inline void initParent(JSObject *newParent); + /* Access the parent link of an object. */ + inline JSObject *getParent() const; + bool setParent(JSContext *cx, JSObject *newParent); - JS_FRIEND_API(js::GlobalObject *) getGlobal() const; - - bool isGlobal() const { - return !!(getClass()->flags & JSCLASS_IS_GLOBAL); - } + /* Get the scope chain of an arbitrary scope object. */ + inline JSObject *scopeChain() const; + inline bool isGlobal() const; inline js::GlobalObject *asGlobal(); + inline js::GlobalObject *getGlobal() const; - void *getPrivate() const { - JS_ASSERT(getClass()->flags & JSCLASS_HAS_PRIVATE); - return privateData; - } + inline bool isInternalScope() const; - inline void initPrivate(void *data); + /* Access the scope chain of an internal scope object. */ + inline JSObject *internalScopeChain() const; + inline bool setInternalScopeChain(JSContext *cx, JSObject *obj); + static inline size_t offsetOfInternalScopeChain(); + + /* + * Access the scope chain of a static block object. These do not appear + * on scope chains but mirror their structure, and can have a NULL + * scope chain. + */ + inline JSObject *getStaticBlockScopeChain() const; + inline void setStaticBlockScopeChain(JSObject *obj); + + /* Common fixed slot for the scope chain of internal scope objects. */ + static const uint32 SCOPE_CHAIN_SLOT = 0; + + /* Private data accessors. */ + + inline bool hasPrivate() const; + inline void *getPrivate() const; inline void setPrivate(void *data); + /* Access private data for an object with a known number of fixed slots. */ + inline void *getPrivate(size_t nfixed) const; + /* N.B. Infallible: NULL means 'no principal', not an error. */ inline JSPrincipals *principals(JSContext *cx); + /* Remove the type (and prototype) or parent from a new object. */ + inline bool clearType(JSContext *cx); + bool clearParent(JSContext *cx); + /* * ES5 meta-object properties and operations. 
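
The scope-chain comment above distinguishes internal scope objects, which keep their enclosing scope in the reserved SCOPE_CHAIN_SLOT, from ordinary objects, which reach it through their parent. The sketch below models only that dispatch; Obj and Kind are illustrative stand-ins, not SpiderMonkey types.

    #include <cassert>
    #include <cstddef>

    struct Obj {
        enum Kind { Global, Call, Block, DeclEnv, With, Plain };
        Kind kind;
        Obj *parent;              // the global for internal scopes, else the enclosing parent
        Obj *scopeChainSlot;      // models fixed slot 0 (SCOPE_CHAIN_SLOT)

        bool isInternalScope() const {
            return kind == Call || kind == Block || kind == DeclEnv || kind == With;
        }

        // Mirrors JSObject::scopeChain(): internal scope objects link through
        // their reserved slot, everything else links through its parent.
        Obj *scopeChain() const {
            return isInternalScope() ? scopeChainSlot : parent;
        }
    };

    int main() {
        Obj global = { Obj::Global, NULL, NULL };
        Obj call   = { Obj::Call,   &global, &global };   // Call scope inside the global
        Obj block  = { Obj::Block,  &global, &call };     // Block nested in the call

        assert(block.scopeChain() == &call);
        assert(call.scopeChain() == &global);
        assert(global.scopeChain() == NULL);              // globals have a NULL parent
        return 0;
    }
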
*/ @@ -980,8 +949,10 @@ struct JSObject : js::gc::Cell { bool isSealedOrFrozen(JSContext *cx, ImmutabilityType it, bool *resultp); + inline void *&privateRef(uint32 nfixed) const; + public: - bool isExtensible() const { return !(flags & NOT_EXTENSIBLE); } + inline bool isExtensible() const; bool preventExtensions(JSContext *cx, js::AutoIdVector *props); /* ES5 15.2.3.8: non-extensible, all props non-configurable */ @@ -1014,10 +985,46 @@ struct JSObject : js::gc::Cell { inline js::StringObject *asString(); inline js::RegExpObject *asRegExp(); + /* Accessors for elements. */ + + js::ObjectElements *getElementsHeader() const { + return js::ObjectElements::fromElements(elements); + } + + inline bool ensureElements(JSContext *cx, uintN cap); + bool growElements(JSContext *cx, uintN cap); + void shrinkElements(JSContext *cx, uintN cap); + + inline js::HeapValue* fixedElements() const { + JS_STATIC_ASSERT(2 * sizeof(js::Value) == sizeof(js::ObjectElements)); + return &fixedSlots()[2]; + } + + void setFixedElements() { this->elements = fixedElements(); } + + inline bool hasDynamicElements() const { + /* + * Note: for objects with zero fixed slots this could potentially give + * a spurious 'true' result, if the end of this object is exactly + * aligned with the end of its arena and dynamic slots are allocated + * immediately afterwards. Such cases cannot occur for dense arrays + * (which have at least two fixed slots) and can only result in a leak. + */ + return elements != js::emptyObjectElements && elements != fixedElements(); + } + + /* JIT Accessors */ + static inline size_t offsetOfElements() { return offsetof(JSObject, elements); } + static inline size_t offsetOfFixedElements() { + return sizeof(JSObject) + sizeof(js::ObjectElements); + } + /* * Array-specific getters and setters (for both dense and slow arrays). */ + bool allocateSlowArrayElements(JSContext *cx); + inline uint32 getArrayLength() const; inline void setArrayLength(JSContext *cx, uint32 length); @@ -1026,7 +1033,6 @@ struct JSObject : js::gc::Cell { inline void setDenseArrayLength(uint32 length); inline void setDenseArrayInitializedLength(uint32 length); inline void ensureDenseArrayInitializedLength(JSContext *cx, uintN index, uintN extra); - inline void backfillDenseArrayHoles(JSContext *cx); inline js::HeapValueArray getDenseArrayElements(); inline const js::Value &getDenseArrayElement(uintN idx); inline void setDenseArrayElement(uintN idx, const js::Value &val); @@ -1036,11 +1042,9 @@ struct JSObject : js::gc::Cell { inline void copyDenseArrayElements(uintN dstStart, const js::Value *src, uintN count); inline void initDenseArrayElements(uintN dstStart, const js::Value *src, uintN count); inline void moveDenseArrayElements(uintN dstStart, uintN srcStart, uintN count); - inline void shrinkDenseArrayElements(JSContext *cx, uintN cap); inline bool denseArrayHasInlineSlots() const; /* Packed information for this array. */ - inline bool isPackedDenseArray(); inline void markDenseArrayNotPacked(JSContext *cx); /* @@ -1113,61 +1117,18 @@ struct JSObject : js::gc::Cell { * Function-specific getters and setters. */ - private: friend struct JSFunction; - friend class js::mjit::Compiler; - /* - * Flat closures with one or more upvars snapshot the upvars' values into a - * vector of js::Values referenced from this slot. - */ - static const uint32 JSSLOT_FLAT_CLOSURE_UPVARS = 0; - - /* - * Null closures set or initialized as methods have these slots. See the - * "method barrier" comments and methods. 
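
getElementsHeader() and fixedElements() above rely on the element vector being laid out immediately after a header that is exactly two Values wide (the static assert against js::ObjectElements). A self-contained sketch of that layout, using placeholder Header/Value types rather than the real ones:

    #include <cassert>
    #include <cstdint>
    #include <cstdlib>

    typedef uint64_t Value;      // stand-in for js::Value (8 bytes here)

    struct Header {              // stand-in for js::ObjectElements
        uint32_t capacity;
        uint32_t initializedLength;
        uint32_t length;
        uint32_t flags;

        // Mirrors ObjectElements::fromElements(): the header sits immediately
        // before the element vector it describes.
        static Header *fromElements(Value *elems) {
            return reinterpret_cast<Header *>(elems) - 1;
        }
    };

    int main() {
        // The header occupies the space of two Values, as the real code asserts.
        assert(sizeof(Header) == 2 * sizeof(Value));

        // Allocate a header followed by `capacity` elements, and keep only the
        // pointer to the first element, as JSObject::elements does.
        const uint32_t capacity = 8;
        void *raw = malloc(sizeof(Header) + capacity * sizeof(Value));
        Header *header = static_cast<Header *>(raw);
        header->capacity = capacity;
        header->initializedLength = 0;
        header->length = 0;
        header->flags = 0;

        Value *elements = reinterpret_cast<Value *>(header + 1);

        // Recover the header from the elements pointer alone.
        assert(Header::fromElements(elements) == header);
        assert(Header::fromElements(elements)->capacity == capacity);

        free(raw);
        return 0;
    }
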
- */ - - static const uint32 JSSLOT_FUN_METHOD_ATOM = 0; - static const uint32 JSSLOT_FUN_METHOD_OBJ = 1; - - static const uint32 JSSLOT_BOUND_FUNCTION_THIS = 0; - static const uint32 JSSLOT_BOUND_FUNCTION_ARGS_COUNT = 1; - - public: - static const uint32 FUN_CLASS_RESERVED_SLOTS = 2; - - static size_t getFlatClosureUpvarsOffset() { - return getFixedSlotOffset(JSSLOT_FLAT_CLOSURE_UPVARS); - } - - inline JSFunction *getFunctionPrivate() const; - - inline js::FlatClosureData *getFlatClosureData() const; - inline js::Value getFlatClosureUpvar(uint32 i) const; - inline const js::Value &getFlatClosureUpvar(uint32 i); - inline void setFlatClosureUpvar(uint32 i, const js::Value &v); - inline void setFlatClosureData(js::FlatClosureData *data); - - /* See comments in fun_finalize. */ - inline void finalizeUpvarsIfFlatClosure(); - - inline bool hasMethodObj(const JSObject& obj) const; - inline void setMethodObj(JSObject& obj); - - inline bool initBoundFunction(JSContext *cx, const js::Value &thisArg, - const js::Value *args, uintN argslen); - - inline JSObject *getBoundFunctionTarget() const; - inline const js::Value &getBoundFunctionThis() const; - inline const js::Value &getBoundFunctionArgument(uintN which) const; - inline size_t getBoundFunctionArgumentCount() const; + inline JSFunction *toFunction(); + inline const JSFunction *toFunction() const; public: /* * Iterator-specific getters and setters. */ + static const uint32 ITER_CLASS_NFIXED_SLOTS = 1; + inline js::NativeIterator *getNativeIterator() const; inline void setNativeIterator(js::NativeIterator *); @@ -1226,40 +1187,20 @@ struct JSObject : js::gc::Cell { */ inline bool isCallable(); - /* Do initialization required immediately after allocation. */ - inline void earlyInit(jsuword capacity); - - /* The map field is not initialized here and should be set separately. */ - void init(JSContext *cx, js::Class *aclasp, js::types::TypeObject *type, - JSObject *parent, void *priv, bool denseArray); - inline void finish(JSContext *cx); - JS_ALWAYS_INLINE void finalize(JSContext *cx); - - /* - * Like init, but also initializes map. proto must have an empty shape - * created for it via proto->getEmptyShape. - */ - inline bool initSharingEmptyShape(JSContext *cx, - js::Class *clasp, - js::types::TypeObject *type, - JSObject *parent, - void *priv, - js::gc::AllocKind kind); + JS_ALWAYS_INLINE void finalize(JSContext *cx, bool background); inline bool hasProperty(JSContext *cx, jsid id, bool *foundp, uintN flags = 0); /* - * Allocate and free an object slot. Note that freeSlot is infallible: it - * returns true iff this is a dictionary-mode object and the freed slot was - * added to the freelist. + * Allocate and free an object slot. * * FIXME: bug 593129 -- slot allocation should be done by object methods * after calling object-parameter-free shape methods, avoiding coupling * logic across the object vs. shape module wall. */ bool allocSlot(JSContext *cx, uint32 *slotp); - bool freeSlot(JSContext *cx, uint32 slot); + void freeSlot(JSContext *cx, uint32 slot); public: bool reportReadOnly(JSContext* cx, jsid id, uintN report = JSREPORT_ERROR); @@ -1284,11 +1225,11 @@ struct JSObject : js::gc::Cell { * 1. getter and setter must be normalized based on flags (see jsscope.cpp). * 2. !isExtensible() checking must be done by callers. 
*/ - const js::Shape *addPropertyInternal(JSContext *cx, jsid id, - JSPropertyOp getter, JSStrictPropertyOp setter, - uint32 slot, uintN attrs, - uintN flags, intN shortid, - js::Shape **spp); + js::Shape *addPropertyInternal(JSContext *cx, jsid id, + JSPropertyOp getter, JSStrictPropertyOp setter, + uint32 slot, uintN attrs, + uintN flags, intN shortid, + js::Shape **spp, bool allowDictionary); bool toDictionaryMode(JSContext *cx); @@ -1299,30 +1240,28 @@ struct JSObject : js::gc::Cell { static void TradeGuts(JSContext *cx, JSObject *a, JSObject *b, TradeGutsReserved &reserved); - void updateFixedSlots(uintN fixed); - public: /* Add a property whose id is not yet in this scope. */ - const js::Shape *addProperty(JSContext *cx, jsid id, - JSPropertyOp getter, JSStrictPropertyOp setter, - uint32 slot, uintN attrs, - uintN flags, intN shortid); + js::Shape *addProperty(JSContext *cx, jsid id, + JSPropertyOp getter, JSStrictPropertyOp setter, + uint32 slot, uintN attrs, + uintN flags, intN shortid, bool allowDictionary = true); /* Add a data property whose id is not yet in this scope. */ - const js::Shape *addDataProperty(JSContext *cx, jsid id, uint32 slot, uintN attrs) { + js::Shape *addDataProperty(JSContext *cx, jsid id, uint32 slot, uintN attrs) { JS_ASSERT(!(attrs & (JSPROP_GETTER | JSPROP_SETTER))); return addProperty(cx, id, NULL, NULL, slot, attrs, 0, 0); } /* Add or overwrite a property for id in this scope. */ - const js::Shape *putProperty(JSContext *cx, jsid id, - JSPropertyOp getter, JSStrictPropertyOp setter, - uint32 slot, uintN attrs, - uintN flags, intN shortid); + js::Shape *putProperty(JSContext *cx, jsid id, + JSPropertyOp getter, JSStrictPropertyOp setter, + uint32 slot, uintN attrs, + uintN flags, intN shortid); /* Change the given property into a sibling with the same id in this scope. */ - const js::Shape *changeProperty(JSContext *cx, const js::Shape *shape, uintN attrs, uintN mask, - JSPropertyOp getter, JSStrictPropertyOp setter); + js::Shape *changeProperty(JSContext *cx, js::Shape *shape, uintN attrs, uintN mask, + JSPropertyOp getter, JSStrictPropertyOp setter); /* Remove the property named by id from this object. */ bool removeProperty(JSContext *cx, jsid id); @@ -1343,6 +1282,7 @@ struct JSObject : js::gc::Cell { JSPropertyOp getter = JS_PropertyStub, JSStrictPropertyOp setter = JS_StrictPropertyStub, uintN attrs = JSPROP_ENUMERATE); + inline JSBool defineElement(JSContext *cx, uint32 index, const js::Value &value, JSPropertyOp getter = JS_PropertyStub, JSStrictPropertyOp setter = JS_StrictPropertyStub, @@ -1373,7 +1313,6 @@ struct JSObject : js::gc::Cell { inline JSBool setSpecial(JSContext *cx, js::SpecialId sid, js::Value *vp, JSBool strict); JSBool nonNativeSetProperty(JSContext *cx, jsid id, js::Value *vp, JSBool strict); - JSBool nonNativeSetElement(JSContext *cx, uint32 index, js::Value *vp, JSBool strict); inline JSBool getGenericAttributes(JSContext *cx, jsid id, uintN *attrsp); @@ -1391,28 +1330,10 @@ struct JSObject : js::gc::Cell { inline JSBool deleteElement(JSContext *cx, uint32 index, js::Value *rval, JSBool strict); inline JSBool deleteSpecial(JSContext *cx, js::SpecialId sid, js::Value *rval, JSBool strict); - JSBool enumerate(JSContext *cx, JSIterateOp iterop, js::Value *statep, jsid *idp) { - JSNewEnumerateOp op = getOps()->enumerate; - return (op ? 
op : JS_EnumerateState)(cx, this, iterop, statep, idp); - } - - bool defaultValue(JSContext *cx, JSType hint, js::Value *vp) { - JSConvertOp op = getClass()->convert; - bool ok = (op == JS_ConvertStub ? js::DefaultValue : op)(cx, this, hint, vp); - JS_ASSERT_IF(ok, vp->isPrimitive()); - return ok; - } - - JSType typeOf(JSContext *cx) { - js::TypeOfOp op = getOps()->typeOf; - return (op ? op : js_TypeOf)(cx, this); - } - - /* These four are time-optimized to avoid stub calls. */ - JSObject *thisObject(JSContext *cx) { - JSObjectOp op = getOps()->thisObject; - return op ? op(cx, this) : this; - } + inline bool enumerate(JSContext *cx, JSIterateOp iterop, js::Value *statep, jsid *idp); + inline bool defaultValue(JSContext *cx, JSType hint, js::Value *vp); + inline JSType typeOf(JSContext *cx); + inline JSObject *thisObject(JSContext *cx); static bool thisObject(JSContext *cx, const js::Value &v, js::Value *vp); @@ -1422,74 +1343,61 @@ struct JSObject : js::gc::Cell { const js::Shape *defineBlockVariable(JSContext *cx, jsid id, intN index); - inline bool canHaveMethodBarrier() const; - - inline bool isArguments() const { return isNormalArguments() || isStrictArguments(); } - inline bool isArrayBuffer() const { return clasp == &js::ArrayBufferClass; } - inline bool isNormalArguments() const { return clasp == &js::NormalArgumentsObjectClass; } - inline bool isStrictArguments() const { return clasp == &js::StrictArgumentsObjectClass; } - inline bool isArray() const { return isSlowArray() || isDenseArray(); } - inline bool isDenseArray() const { return clasp == &js::ArrayClass; } - inline bool isSlowArray() const { return clasp == &js::SlowArrayClass; } - inline bool isNumber() const { return clasp == &js::NumberClass; } - inline bool isBoolean() const { return clasp == &js::BooleanClass; } - inline bool isString() const { return clasp == &js::StringClass; } - inline bool isPrimitive() const { return isNumber() || isString() || isBoolean(); } - inline bool isDate() const { return clasp == &js::DateClass; } - inline bool isFunction() const { return clasp == &js::FunctionClass; } - inline bool isObject() const { return clasp == &js::ObjectClass; } - inline bool isWith() const { return clasp == &js::WithClass; } - inline bool isBlock() const { return clasp == &js::BlockClass; } - inline bool isStaticBlock() const { return isBlock() && !getProto(); } - inline bool isClonedBlock() const { return isBlock() && !!getProto(); } - inline bool isCall() const { return clasp == &js::CallClass; } - inline bool isDeclEnv() const { return clasp == &js::DeclEnvClass; } - inline bool isRegExp() const { return clasp == &js::RegExpClass; } - inline bool isGenerator() const { return clasp == &js::GeneratorClass; } - inline bool isIterator() const { return clasp == &js::IteratorClass; } - inline bool isStopIteration() const { return clasp == &js::StopIterationClass; } - inline bool isError() const { return clasp == &js::ErrorClass; } - inline bool isXML() const { return clasp == &js::XMLClass; } - inline bool isNamespace() const { return clasp == &js::NamespaceClass; } - inline bool isWeakMap() const { return clasp == &js::WeakMapClass; } + inline bool isArguments() const; + inline bool isArrayBuffer() const; + inline bool isNormalArguments() const; + inline bool isStrictArguments() const; + inline bool isArray() const; + inline bool isDenseArray() const; + inline bool isSlowArray() const; + inline bool isNumber() const; + inline bool isBoolean() const; + inline bool isString() const; + inline bool isPrimitive() const; 
+ inline bool isDate() const; + inline bool isFunction() const; + inline bool isObject() const; + inline bool isWith() const; + inline bool isBlock() const; + inline bool isStaticBlock() const; + inline bool isClonedBlock() const; + inline bool isCall() const; + inline bool isDeclEnv() const; + inline bool isRegExp() const; + inline bool isScript() const; + inline bool isGenerator() const; + inline bool isIterator() const; + inline bool isStopIteration() const; + inline bool isError() const; + inline bool isXML() const; + inline bool isNamespace() const; + inline bool isWeakMap() const; + inline bool isFunctionProxy() const; inline bool isProxy() const; - inline bool isXMLId() const { - return clasp == &js::QNameClass || clasp == &js::AttributeNameClass || clasp == &js::AnyNameClass; - } - inline bool isQName() const { - return clasp == &js::QNameClass || clasp == &js::AttributeNameClass || clasp == &js::AnyNameClass; - } + inline bool isXMLId() const; + inline bool isQName() const; inline bool isWrapper() const; inline bool isCrossCompartmentWrapper() const; inline void initArrayClass(); - static void staticAsserts() { - /* Check alignment for any fixed slots allocated after the object. */ - JS_STATIC_ASSERT(sizeof(JSObject) % sizeof(js::Value) == 0); - - JS_STATIC_ASSERT(offsetof(JSObject, clasp) == offsetof(js::shadow::Object, clasp)); - JS_STATIC_ASSERT(offsetof(JSObject, flags) == offsetof(js::shadow::Object, flags)); - JS_STATIC_ASSERT(offsetof(JSObject, objShape) == offsetof(js::shadow::Object, objShape)); - JS_STATIC_ASSERT(offsetof(JSObject, parent) == offsetof(js::shadow::Object, parent)); - JS_STATIC_ASSERT(offsetof(JSObject, privateData) == offsetof(js::shadow::Object, privateData)); - JS_STATIC_ASSERT(offsetof(JSObject, capacity) == offsetof(js::shadow::Object, capacity)); - JS_STATIC_ASSERT(offsetof(JSObject, slots) == offsetof(js::shadow::Object, slots)); - JS_STATIC_ASSERT(offsetof(JSObject, type_) == offsetof(js::shadow::Object, type)); - JS_STATIC_ASSERT(sizeof(JSObject) == sizeof(js::shadow::Object)); - JS_STATIC_ASSERT(FIXED_SLOTS_SHIFT == js::shadow::Object::FIXED_SLOTS_SHIFT); - } - - /*** For jit compiler: ***/ - - static size_t offsetOfClassPointer() { return offsetof(JSObject, clasp); } - static inline void writeBarrierPre(JSObject *obj); static inline void writeBarrierPost(JSObject *obj, void *addr); inline void privateWriteBarrierPre(void **oldval); inline void privateWriteBarrierPost(void **oldval); + + private: + static void staticAsserts() { + /* Check alignment for any fixed slots allocated after the object. 
*/ + JS_STATIC_ASSERT(sizeof(JSObject) % sizeof(js::Value) == 0); + + JS_STATIC_ASSERT(offsetof(JSObject, shape_) == offsetof(js::shadow::Object, shape)); + JS_STATIC_ASSERT(offsetof(JSObject, slots) == offsetof(js::shadow::Object, slots)); + JS_STATIC_ASSERT(offsetof(JSObject, type_) == offsetof(js::shadow::Object, type)); + JS_STATIC_ASSERT(sizeof(JSObject) == sizeof(js::shadow::Object)); + } }; /* @@ -1518,7 +1426,7 @@ JSObject::fixedSlots() const inline size_t JSObject::numFixedSlots() const { - return flags >> FIXED_SLOTS_SHIFT; + return reinterpret_cast(this)->numFixedSlots(); } /* static */ inline size_t @@ -1526,6 +1434,11 @@ JSObject::getFixedSlotOffset(size_t slot) { return sizeof(JSObject) + (slot * sizeof(js::Value)); } +/* static */ inline size_t +JSObject::getPrivateDataOffset(size_t nfixed) { + return getFixedSlotOffset(nfixed); +} + struct JSObject_Slots2 : JSObject { js::Value fslots[2]; }; struct JSObject_Slots4 : JSObject { js::Value fslots[4]; }; struct JSObject_Slots8 : JSObject { js::Value fslots[8]; }; @@ -1549,20 +1462,6 @@ struct JSObject_Slots16 : JSObject { js::Value fslots[16]; }; #endif /* JS_THREADSAFE */ -inline void -OBJ_TO_INNER_OBJECT(JSContext *cx, JSObject *&obj) -{ - if (JSObjectOp op = obj->getClass()->ext.innerObject) - obj = op(cx, obj); -} - -inline void -OBJ_TO_OUTER_OBJECT(JSContext *cx, JSObject *&obj) -{ - if (JSObjectOp op = obj->getClass()->ext.outerObject) - obj = op(cx, obj); -} - class JSValueArray { public: jsval *array; @@ -1583,21 +1482,22 @@ class ValueArray { * Block scope object macros. The slots reserved by BlockClass are: * * private StackFrame * active frame pointer or null + * JSSLOT_SCOPE_CHAIN JSObject * scope chain, as for other scopes * JSSLOT_BLOCK_DEPTH int depth of block slots in frame * * After JSSLOT_BLOCK_DEPTH come one or more slots for the block locals. * - * A With object is like a Block object, in that both have one reserved slot + * A With object is like a Block object, in that both have a reserved slot * telling the stack depth of the relevant slots (the slot whose value is the * object named in the with statement, the slots containing the block's local * variables); and both have a private slot referring to the StackFrame in * whose activation they were created (or null if the with or block object * outlives the frame). */ -static const uint32 JSSLOT_BLOCK_DEPTH = 0; +static const uint32 JSSLOT_BLOCK_DEPTH = 1; static const uint32 JSSLOT_BLOCK_FIRST_FREE_SLOT = JSSLOT_BLOCK_DEPTH + 1; -static const uint32 JSSLOT_WITH_THIS = 1; +static const uint32 JSSLOT_WITH_THIS = 2; #define OBJ_BLOCK_COUNT(cx,obj) \ (obj)->propertyCount() @@ -1684,28 +1584,89 @@ extern JSFunctionSpec object_static_methods[]; namespace js { -JSObject * -DefineConstructorAndPrototype(JSContext *cx, JSObject *obj, JSProtoKey key, JSAtom *atom, - JSObject *protoProto, Class *clasp, - Native constructor, uintN nargs, - JSPropertySpec *ps, JSFunctionSpec *fs, - JSPropertySpec *static_ps, JSFunctionSpec *static_fs, - JSObject **ctorp = NULL); - bool IsStandardClassResolved(JSObject *obj, js::Class *clasp); void MarkStandardClassInitializedNoProto(JSObject *obj, js::Class *clasp); -} +/* + * Cache for speeding up repetitive creation of objects in the VM. + * When an object is created which matches the criteria in the 'key' section + * below, an entry is filled with the resulting object. + */ +class NewObjectCache +{ + struct Entry + { + /* Class of the constructed object. 
*/ + Class *clasp; -extern JSObject * -js_InitClass(JSContext *cx, JSObject *obj, JSObject *parent_proto, - js::Class *clasp, JSNative constructor, uintN nargs, - JSPropertySpec *ps, JSFunctionSpec *fs, - JSPropertySpec *static_ps, JSFunctionSpec *static_fs, - JSObject **ctorp = NULL); + /* + * Key with one of three possible values: + * + * - Global for the object. The object must have a standard class for + * which the global's prototype can be determined, and the object's + * parent will be the global. + * + * - Prototype for the object (cannot be global). The object's parent + * will be the prototype's parent. + * + * - Type for the object. The object's parent will be the type's + * prototype's parent. + */ + gc::Cell *key; + + /* Allocation kind for the constructed object. */ + gc::AllocKind kind; + + /* Number of bytes to copy from the template object. */ + uint32 nbytes; + + /* + * Template object to copy from, with the initial values of fields, + * fixed slots (undefined) and private data (NULL). + */ + JSObject_Slots16 templateObject; + }; + + Entry entries[41]; + + void staticAsserts() { + JS_STATIC_ASSERT(gc::FINALIZE_OBJECT_LAST == gc::FINALIZE_OBJECT16_BACKGROUND); + } + + public: + + typedef int EntryIndex; + + void reset() { PodZero(this); } + + /* + * Get the entry index for the given lookup, return whether there was a hit + * on an existing entry. + */ + inline bool lookupProto(Class *clasp, JSObject *proto, gc::AllocKind kind, EntryIndex *pentry); + inline bool lookupGlobal(Class *clasp, js::GlobalObject *global, gc::AllocKind kind, EntryIndex *pentry); + inline bool lookupType(Class *clasp, js::types::TypeObject *type, gc::AllocKind kind, EntryIndex *pentry); + + /* Return a new object from a cache hit produced by a lookup method. */ + inline JSObject *newObjectFromHit(JSContext *cx, EntryIndex entry); + + /* Fill an entry after a cache miss. */ + inline void fillProto(EntryIndex entry, Class *clasp, JSObject *proto, gc::AllocKind kind, JSObject *obj); + inline void fillGlobal(EntryIndex entry, Class *clasp, js::GlobalObject *global, gc::AllocKind kind, JSObject *obj); + inline void fillType(EntryIndex entry, Class *clasp, js::types::TypeObject *type, gc::AllocKind kind, JSObject *obj); + + /* Invalidate any entries which might produce an object with shape/proto. */ + void invalidateEntriesForShape(JSContext *cx, Shape *shape, JSObject *proto); + + private: + inline bool lookup(Class *clasp, gc::Cell *key, gc::AllocKind kind, EntryIndex *pentry); + inline void fill(EntryIndex entry, Class *clasp, gc::Cell *key, gc::AllocKind kind, JSObject *obj); +}; + +} /* namespace js */ /* * Select Object.prototype method names shared between jsapi.cpp and jsobj.cpp. @@ -1761,27 +1722,11 @@ js_CreateThis(JSContext *cx, JSObject *callee); extern jsid js_CheckForStringIndex(jsid id); -/* - * js_PurgeScopeChain does nothing if obj is not itself a prototype or parent - * scope, else it reshapes the scope and prototype chains it links. It calls - * js_PurgeScopeChainHelper, which asserts that obj is flagged as a delegate - * (i.e., obj has ever been on a prototype or parent chain). - */ -extern void -js_PurgeScopeChainHelper(JSContext *cx, JSObject *obj, jsid id); - -inline void -js_PurgeScopeChain(JSContext *cx, JSObject *obj, jsid id) -{ - if (obj->isDelegate()) - js_PurgeScopeChainHelper(cx, obj, id); -} - /* * Find or create a property named by id in obj's scope, with the given getter * and setter, slot, attributes, and other members. 
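
The NewObjectCache above follows a lookup-then-fill protocol: a lookup*() call returns an entry index whether or not it hits, a miss is followed by a fill*() with a snapshot of the freshly created object, and later hits copy that template. Below is a heavily reduced sketch of the same protocol; the hash function, entry layout and dummy key types are invented for illustration (the real cache keys on a proto, global or type and copies nbytes of the template object).

    #include <cassert>
    #include <cstddef>
    #include <cstring>

    struct Entry {
        const void *clasp;                  // class of the cached object
        const void *key;                    // proto, global or type used as the key
        int kind;                           // allocation size class
        unsigned char templateBytes[64];    // snapshot copied into each new object
        bool used;
    };

    struct MiniNewObjectCache {
        Entry entries[41];                  // same fixed entry count as above

        static size_t hashEntry(const void *clasp, const void *key, int kind) {
            return (reinterpret_cast<size_t>(clasp) ^
                    reinterpret_cast<size_t>(key) ^
                    static_cast<size_t>(kind)) % 41;
        }

        // Like lookupProto()/lookupGlobal()/lookupType(): *pentry always names
        // the slot to use, and the return value says whether it already matches.
        bool lookup(const void *clasp, const void *key, int kind, size_t *pentry) {
            *pentry = hashEntry(clasp, key, kind);
            const Entry &e = entries[*pentry];
            return e.used && e.clasp == clasp && e.key == key && e.kind == kind;
        }

        // Like fill*(): record a snapshot of the freshly created object on a miss.
        void fill(size_t entry, const void *clasp, const void *key, int kind,
                  const void *obj, size_t nbytes) {
            Entry &e = entries[entry];
            e.clasp = clasp;
            e.key = key;
            e.kind = kind;
            e.used = true;
            if (nbytes > sizeof(e.templateBytes))
                nbytes = sizeof(e.templateBytes);
            memcpy(e.templateBytes, obj, nbytes);
        }
    };

    int main() {
        MiniNewObjectCache cache;
        memset(&cache, 0, sizeof(cache));

        int clasp = 0, proto = 0;           // dummy stand-ins for real pointers
        char freshObject[16] = "template";
        size_t entry;

        assert(!cache.lookup(&clasp, &proto, 4, &entry));   // first creation: miss
        cache.fill(entry, &clasp, &proto, 4, freshObject, sizeof(freshObject));
        assert(cache.lookup(&clasp, &proto, 4, &entry));    // subsequent hits
        return 0;
    }
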
*/ -extern const js::Shape * +extern js::Shape * js_AddNativeProperty(JSContext *cx, JSObject *obj, jsid id, JSPropertyOp getter, JSStrictPropertyOp setter, uint32 slot, uintN attrs, uintN flags, intN shortid); @@ -1791,9 +1736,9 @@ js_AddNativeProperty(JSContext *cx, JSObject *obj, jsid id, * it into a potentially new js::Shape. Return a pointer to the changed * or identical property. */ -extern const js::Shape * +extern js::Shape * js_ChangeNativePropertyAttrs(JSContext *cx, JSObject *obj, - const js::Shape *shape, uintN attrs, uintN mask, + js::Shape *shape, uintN attrs, uintN mask, JSPropertyOp getter, JSStrictPropertyOp setter); extern JSBool @@ -1857,22 +1802,6 @@ ReadPropertyDescriptors(JSContext *cx, JSObject *props, bool checkAccessors, */ static const uintN RESOLVE_INFER = 0xffff; -/* - * We cache name lookup results only for the global object or for native - * non-global objects without prototype or with prototype that never mutates, - * see bug 462734 and bug 487039. - */ -static inline bool -IsCacheableNonGlobalScope(JSObject *obj) -{ - JS_ASSERT(obj->getParent()); - - bool cacheable = (obj->isCall() || obj->isBlock() || obj->isDeclEnv()); - - JS_ASSERT_IF(cacheable, !obj->getOps()->lookupProperty); - return cacheable; -} - } /* @@ -2011,25 +1940,6 @@ ToObject(JSContext *cx, Value *vp) return ToObjectSlow(cx, vp); } -/* ES5 9.1 ToPrimitive(input). */ -static JS_ALWAYS_INLINE bool -ToPrimitive(JSContext *cx, Value *vp) -{ - if (vp->isPrimitive()) - return true; - return vp->toObject().defaultValue(cx, JSTYPE_VOID, vp); -} - -/* ES5 9.1 ToPrimitive(input, PreferredType). */ -static JS_ALWAYS_INLINE bool -ToPrimitive(JSContext *cx, JSType preferredType, Value *vp) -{ - JS_ASSERT(preferredType != JSTYPE_VOID); /* Use the other ToPrimitive! */ - if (vp->isPrimitive()) - return true; - return vp->toObject().defaultValue(cx, preferredType, vp); -} - } /* namespace js */ /* diff --git a/js/src/jsobjinlines.h b/js/src/jsobjinlines.h index f19d9e332db8..caaf3c2a0f8b 100644 --- a/js/src/jsobjinlines.h +++ b/js/src/jsobjinlines.h @@ -80,56 +80,69 @@ #include "vm/String-inl.h" inline bool -JSObject::preventExtensions(JSContext *cx, js::AutoIdVector *props) +JSObject::hasPrivate() const { - JS_ASSERT(isExtensible()); + return getClass()->hasPrivate(); +} - if (js::FixOp fix = getOps()->fix) { - bool success; - if (!fix(cx, this, &success, props)) - return false; - if (!success) { - JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_CANT_CHANGE_EXTENSIBILITY); - return false; - } - } else { - if (!js::GetPropertyNames(cx, this, JSITER_HIDDEN | JSITER_OWNONLY, props)) - return false; - } +inline void *& +JSObject::privateRef(uint32 nfixed) const +{ + /* + * The private pointer of an object can hold any word sized value. + * Private pointers are stored immediately after the last fixed slot of + * the object. 
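
The comment above places the private pointer in the word immediately after the object's last fixed slot. The following standalone sketch shows that layout trick with simplified stand-in types; it is illustration only and assumes, like the real code, that the allocation actually provides the trailing word (here supplied via a derived struct rather than a larger GC allocation kind).

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    typedef uint64_t Value;                 // stand-in for js::HeapValue

    struct MiniObject {
        static const size_t NFIXED = 3;
        Value fixedSlots[NFIXED];

        // Mirrors privateRef(nfixed): the private pointer is the word located
        // immediately after the last fixed slot.
        void *&privateRef() {
            Value *end = fixedSlots + NFIXED;
            return *reinterpret_cast<void **>(end);
        }
    };

    // The trailing word; this relies on the usual ABI layout (no padding
    // between the base subobject and the derived member).
    struct MiniObjectWithPrivate : MiniObject {
        void *privateData;
    };

    int main() {
        MiniObjectWithPrivate obj;
        int payload = 42;

        obj.privateRef() = &payload;                 // setPrivate()
        assert(obj.privateData == &payload);         // landed in the trailing word
        assert(obj.privateRef() == &payload);        // getPrivate()
        return 0;
    }
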
+ */ + JS_ASSERT(nfixed == numFixedSlots()); + JS_ASSERT(hasPrivate()); + js::HeapValue *end = &fixedSlots()[nfixed]; + return *reinterpret_cast(end); +} - if (isNative()) - extensibleShapeChange(cx); +inline void * +JSObject::getPrivate() const { return privateRef(numFixedSlots()); } - flags |= NOT_EXTENSIBLE; - return true; +inline void * +JSObject::getPrivate(size_t nfixed) const { return privateRef(nfixed); } + +inline void +JSObject::setPrivate(void *data) +{ + void **pprivate = &privateRef(numFixedSlots()); + + privateWriteBarrierPre(pprivate); + *pprivate = data; + privateWriteBarrierPost(pprivate); } inline bool -JSObject::brand(JSContext *cx) +JSObject::enumerate(JSContext *cx, JSIterateOp iterop, js::Value *statep, jsid *idp) { - JS_ASSERT(!generic()); - JS_ASSERT(!branded()); - JS_ASSERT(isNative()); - JS_ASSERT(!cx->typeInferenceEnabled()); - generateOwnShape(cx); - if (js_IsPropertyCacheDisabled(cx)) // check for rt->shapeGen overflow - return false; - flags |= BRANDED; - return true; + JSNewEnumerateOp op = getOps()->enumerate; + return (op ? op : JS_EnumerateState)(cx, this, iterop, statep, idp); } inline bool -JSObject::unbrand(JSContext *cx) +JSObject::defaultValue(JSContext *cx, JSType hint, js::Value *vp) { - JS_ASSERT(isNative()); - if (branded()) { - generateOwnShape(cx); - if (js_IsPropertyCacheDisabled(cx)) // check for rt->shapeGen overflow - return false; - flags &= ~BRANDED; - } - setGeneric(); - return true; + JSConvertOp op = getClass()->convert; + bool ok = (op == JS_ConvertStub ? js::DefaultValue : op)(cx, this, hint, vp); + JS_ASSERT_IF(ok, vp->isPrimitive()); + return ok; +} + +inline JSType +JSObject::typeOf(JSContext *cx) +{ + js::TypeOfOp op = getOps()->typeOf; + return (op ? op : js_TypeOf)(cx, this); +} + +inline JSObject * +JSObject::thisObject(JSContext *cx) +{ + JSObjectOp op = getOps()->thisObject; + return op ? op(cx, this) : this; } inline JSBool @@ -251,175 +264,122 @@ JSObject::deleteSpecial(JSContext *cx, js::SpecialId sid, js::Value *rval, JSBoo } inline void -JSObject::syncSpecialEquality() +JSObject::finalize(JSContext *cx, bool background) { - if (getClass()->ext.equality) { - flags |= JSObject::HAS_EQUALITY; - JS_ASSERT_IF(!hasLazyType(), type()->hasAnyFlags(js::types::OBJECT_FLAG_SPECIAL_EQUALITY)); - } -} - -inline void -JSObject::finalize(JSContext *cx) -{ - /* Cope with stillborn objects that have no map. */ - if (isNewborn()) - return; - js::Probes::finalizeObject(this); - /* Finalize obj first, in case it needs map and slots. */ - js::Class *clasp = getClass(); - if (clasp->finalize) - clasp->finalize(cx, this); + if (!background) { + /* + * Finalize obj first, in case it needs map and slots. Objects with + * finalize hooks are not finalized in the background, as the class is + * stored in the object's shape, which may have already been destroyed. + */ + js::Class *clasp = getClass(); + if (clasp->finalize) + clasp->finalize(cx, this); + } finish(cx); } -/* - * Initializer for Call objects for functions and eval frames. Set class, - * parent, map, and shape, and allocate slots. - */ -inline void -JSObject::initCall(JSContext *cx, const js::Bindings &bindings, JSObject *parent) +inline JSObject * +JSObject::getParent() const { - init(cx, &js::CallClass, &js::types::emptyTypeObject, parent, NULL, false); - lastProp.init(bindings.lastShape()); - - /* - * If |bindings| is for a function that has extensible parents, that means - * its Call should have its own shape; see js::Bindings::extensibleParents. 
- */ - if (bindings.extensibleParents()) - setOwnShape(js_GenerateShape(cx)); - else - objShape = lastProp->shapeid; + return lastProperty()->getObjectParent(); } -/* - * Initializer for cloned block objects. Set class, prototype, frame, map, and - * shape. - */ -inline void -JSObject::initClonedBlock(JSContext *cx, js::types::TypeObject *type, js::StackFrame *frame) +inline bool +JSObject::isInternalScope() const { - init(cx, &js::BlockClass, type, NULL, frame, false); - - /* Cloned blocks copy their prototype's map; it had better be shareable. */ - JS_ASSERT(!getProto()->inDictionaryMode() || getProto()->lastProp->frozen()); - lastProp = getProto()->lastProp; - - /* - * If the prototype has its own shape, that means the clone should, too; see - * js::Bindings::extensibleParents. - */ - if (getProto()->hasOwnShape()) - setOwnShape(js_GenerateShape(cx)); - else - objShape = lastProp->shapeid; + return isCall() || isDeclEnv() || isBlock() || isWith(); } -/* - * Mark a compile-time block as OWN_SHAPE, indicating that its run-time clones - * also need unique shapes. See js::Bindings::extensibleParents. - */ -inline void -JSObject::setBlockOwnShape(JSContext *cx) +inline JSObject * +JSObject::internalScopeChain() const +{ + JS_ASSERT(isInternalScope()); + return &getFixedSlot(SCOPE_CHAIN_SLOT).toObject(); +} + +inline bool +JSObject::setInternalScopeChain(JSContext *cx, JSObject *obj) +{ + JS_ASSERT(isInternalScope()); + if (!obj->setDelegate(cx)) + return false; + setFixedSlot(SCOPE_CHAIN_SLOT, JS::ObjectValue(*obj)); + return true; +} + +/*static*/ inline size_t +JSObject::offsetOfInternalScopeChain() +{ + return getFixedSlotOffset(SCOPE_CHAIN_SLOT); +} + +inline JSObject * +JSObject::scopeChain() const +{ + return isInternalScope() ? internalScopeChain() : getParent(); +} + +inline JSObject * +JSObject::getStaticBlockScopeChain() const { JS_ASSERT(isStaticBlock()); - setOwnShape(js_GenerateShape(cx)); + return getFixedSlot(SCOPE_CHAIN_SLOT).toObjectOrNull(); +} + +inline void +JSObject::setStaticBlockScopeChain(JSObject *obj) +{ + JS_ASSERT(isStaticBlock()); + setFixedSlot(SCOPE_CHAIN_SLOT, JS::ObjectOrNullValue(obj)); } /* * Property read barrier for deferred cloning of compiler-created function * objects optimized as typically non-escaping, ad-hoc methods in obj. */ -inline const js::Shape * +inline js::Shape * JSObject::methodReadBarrier(JSContext *cx, const js::Shape &shape, js::Value *vp) { - JS_ASSERT(canHaveMethodBarrier()); - JS_ASSERT(hasMethodBarrier()); JS_ASSERT(nativeContains(cx, shape)); JS_ASSERT(shape.isMethod()); - JS_ASSERT(shape.methodObject() == vp->toObject()); JS_ASSERT(shape.writable()); - JS_ASSERT(shape.slot != SHAPE_INVALID_SLOT); + JS_ASSERT(shape.hasSlot()); JS_ASSERT(shape.hasDefaultSetter()); JS_ASSERT(!isGlobal()); /* i.e. we are not changing the global shape */ - JSObject *funobj = &vp->toObject(); - JSFunction *fun = funobj->getFunctionPrivate(); - JS_ASSERT(fun == funobj); + JSFunction *fun = vp->toObject().toFunction(); + JS_ASSERT(!fun->isClonedMethod()); JS_ASSERT(fun->isNullClosure()); - funobj = CloneFunctionObject(cx, fun); - if (!funobj) + fun = js::CloneFunctionObject(cx, fun); + if (!fun) return NULL; - funobj->setMethodObj(*this); + fun->setMethodObj(*this); /* * Replace the method property with an ordinary data property. This is * equivalent to this->setProperty(cx, shape.id, vp) except that any * watchpoint on the property is not triggered. 
*/ - uint32 slot = shape.slot; - const js::Shape *newshape = methodShapeChange(cx, shape); + uint32 slot = shape.slot(); + js::Shape *newshape = methodShapeChange(cx, shape); if (!newshape) return NULL; JS_ASSERT(!newshape->isMethod()); - JS_ASSERT(newshape->slot == slot); - vp->setObject(*funobj); + JS_ASSERT(newshape->slot() == slot); + vp->setObject(*fun); nativeSetSlot(slot, *vp); return newshape; } -static JS_ALWAYS_INLINE bool -ChangesMethodValue(const js::Value &prev, const js::Value &v) -{ - JSObject *prevObj; - return prev.isObject() && (prevObj = &prev.toObject())->isFunction() && - (!v.isObject() || &v.toObject() != prevObj); -} - -inline const js::Shape * -JSObject::methodWriteBarrier(JSContext *cx, const js::Shape &shape, const js::Value &v) -{ - if (brandedOrHasMethodBarrier() && shape.slot != SHAPE_INVALID_SLOT) { - const js::Value &prev = nativeGetSlot(shape.slot); - - if (ChangesMethodValue(prev, v)) - return methodShapeChange(cx, shape); - } - return &shape; -} - inline bool -JSObject::methodWriteBarrier(JSContext *cx, uint32 slot, const js::Value &v) +JSObject::canHaveMethodBarrier() const { - if (brandedOrHasMethodBarrier()) { - const js::Value &prev = nativeGetSlot(slot); - - if (ChangesMethodValue(prev, v)) - return methodShapeChange(cx, slot); - } - return true; -} - -inline const js::HeapValue * -JSObject::getRawSlots() -{ - JS_ASSERT(isGlobal()); - return slots; -} - -inline const js::HeapValue * -JSObject::getRawSlot(size_t slot, const js::HeapValue *slots) -{ - JS_ASSERT(isGlobal()); - size_t fixed = numFixedSlots(); - if (slot < fixed) - return fixedSlots() + slot; - return slots + slot - fixed; + return isObject() || isFunction() || isPrimitive() || isDate(); } inline bool @@ -429,13 +389,6 @@ JSObject::isFixedSlot(size_t slot) return slot < numFixedSlots(); } -inline size_t -JSObject::numDynamicSlots(size_t capacity) const -{ - JS_ASSERT(capacity >= numFixedSlots()); - return isDenseArray() ? capacity : capacity - numFixedSlots(); -} - inline size_t JSObject::dynamicSlotIndex(size_t slot) { @@ -443,22 +396,79 @@ JSObject::dynamicSlotIndex(size_t slot) return slot - numFixedSlots(); } -inline bool -JSObject::ensureClassReservedSlots(JSContext *cx) +/*static*/ inline size_t +JSObject::dynamicSlotsCount(size_t nfixed, size_t span) { - return !nativeEmpty() || ensureClassReservedSlotsForEmptyObject(cx); + if (span <= nfixed) + return 0; + span -= nfixed; + if (span <= SLOT_CAPACITY_MIN) + return SLOT_CAPACITY_MIN; + + size_t slots = js::RoundUpPow2(span); + JS_ASSERT(slots >= span); + return slots; +} + +inline size_t +JSObject::numDynamicSlots() const +{ + return dynamicSlotsCount(numFixedSlots(), slotSpan()); +} + +inline void +JSObject::setLastPropertyInfallible(const js::Shape *shape) +{ + JS_ASSERT(!shape->inDictionary()); + JS_ASSERT(shape->compartment() == compartment()); + JS_ASSERT(!inDictionaryMode()); + JS_ASSERT(slotSpan() == shape->slotSpan()); + JS_ASSERT(numFixedSlots() == shape->numFixedSlots()); + + shape_ = const_cast(shape); +} + +inline void +JSObject::removeLastProperty(JSContext *cx) +{ + JS_ASSERT(canRemoveLastProperty()); + JS_ALWAYS_TRUE(setLastProperty(cx, lastProperty()->previous())); +} + +inline bool +JSObject::canRemoveLastProperty() +{ + /* + * Check that the information about the object stored in the last + * property's base shape is consistent with that stored in the previous + * shape. 
If not consistent, then the last property cannot be removed as it + * will induce a change in the object itself, and the object must be + * converted to dictionary mode instead. See BaseShape comment in jsscope.h + */ + JS_ASSERT(!inDictionaryMode()); + const js::Shape *previous = lastProperty()->previous(); + return previous->getObjectParent() == lastProperty()->getObjectParent() + && previous->getObjectFlags() == lastProperty()->getObjectFlags(); +} + +inline const js::HeapValue * +JSObject::getRawSlots() +{ + JS_ASSERT(isGlobal()); + return slots; } inline js::Value JSObject::getReservedSlot(uintN index) const { - return (index < numSlots()) ? getSlot(index) : js::UndefinedValue(); + JS_ASSERT(index < JSSLOT_FREE(getClass())); + return getSlot(index); } inline js::HeapValue & JSObject::getReservedSlotRef(uintN index) { - JS_ASSERT(index < numSlots()); + JS_ASSERT(index < JSSLOT_FREE(getClass())); return getSlotRef(index); } @@ -469,12 +479,6 @@ JSObject::setReservedSlot(uintN index, const js::Value &v) setSlot(index, v); } -inline bool -JSObject::canHaveMethodBarrier() const -{ - return isObject() || isFunction() || isPrimitive() || isDate(); -} - inline const js::Value & JSObject::getPrimitiveThis() const { @@ -489,14 +493,6 @@ JSObject::setPrimitiveThis(const js::Value &pthis) setFixedSlot(JSSLOT_PRIMITIVE_THIS, pthis); } -inline bool -JSObject::hasSlotsArray() const -{ - JS_ASSERT_IF(!slots, !isDenseArray()); - JS_ASSERT_IF(slots == fixedSlots(), isDenseArray() || isArrayBuffer()); - return slots && slots != fixedSlots(); -} - inline bool JSObject::hasContiguousSlots(size_t start, size_t count) const { @@ -504,51 +500,31 @@ JSObject::hasContiguousSlots(size_t start, size_t count) const * Check that the range [start, start+count) is either all inline or all * out of line. */ - JS_ASSERT(start + count <= numSlots()); + JS_ASSERT(slotInRange(start + count, SENTINEL_ALLOWED)); return (start + count <= numFixedSlots()) || (start >= numFixedSlots()); } inline void JSObject::prepareSlotRangeForOverwrite(size_t start, size_t end) { - if (isDenseArray()) { - JS_ASSERT(end <= initializedLength()); - for (size_t i = start; i < end; i++) - slots[i].js::HeapValue::~HeapValue(); - } else { - for (size_t i = start; i < end; i++) - getSlotRef(i).js::HeapValue::~HeapValue(); - } + for (size_t i = start; i < end; i++) + getSlotAddressUnchecked(i)->js::HeapValue::~HeapValue(); } -inline size_t -JSObject::structSize() const +inline void +JSObject::prepareElementRangeForOverwrite(size_t start, size_t end) { - return (isFunction() && !getPrivate()) - ? 
sizeof(JSFunction) - : (sizeof(JSObject) + sizeof(js::Value) * numFixedSlots()); -} - -inline size_t -JSObject::slotsAndStructSize() const -{ - int ndslots = 0; - if (isDenseArray()) { - if (!denseArrayHasInlineSlots()) - ndslots = numSlots(); - } else { - if (slots) - ndslots = numSlots() - numFixedSlots(); - } - - return structSize() + sizeof(js::Value) * ndslots; + JS_ASSERT(isDenseArray()); + JS_ASSERT(end <= getDenseArrayInitializedLength()); + for (size_t i = start; i < end; i++) + elements[i].js::HeapValue::~HeapValue(); } inline uint32 JSObject::getArrayLength() const { JS_ASSERT(isArray()); - return (uint32)(uintptr_t) getPrivate(); + return getElementsHeader()->length; } inline void @@ -569,7 +545,7 @@ JSObject::setArrayLength(JSContext *cx, uint32 length) js::types::Type::DoubleType()); } - privateData = (void*)(uintptr_t) length; + getElementsHeader()->length = length; } inline void @@ -578,42 +554,65 @@ JSObject::setDenseArrayLength(uint32 length) /* Variant of setArrayLength for use on dense arrays where the length cannot overflow int32. */ JS_ASSERT(isDenseArray()); JS_ASSERT(length <= INT32_MAX); - privateData = (void*)(uintptr_t) length; + getElementsHeader()->length = length; +} + +inline uint32 +JSObject::getDenseArrayInitializedLength() +{ + JS_ASSERT(isDenseArray()); + return getElementsHeader()->initializedLength; +} + +inline void +JSObject::setDenseArrayInitializedLength(uint32 length) +{ + JS_ASSERT(isDenseArray()); + JS_ASSERT(length <= getDenseArrayCapacity()); + getElementsHeader()->initializedLength = length; } inline uint32 JSObject::getDenseArrayCapacity() { JS_ASSERT(isDenseArray()); - return numSlots(); + return getElementsHeader()->capacity; +} + +inline bool +JSObject::ensureElements(JSContext *cx, uint32 capacity) +{ + if (capacity > getDenseArrayCapacity()) + return growElements(cx, capacity); + return true; } inline js::HeapValueArray JSObject::getDenseArrayElements() { JS_ASSERT(isDenseArray()); - return js::HeapValueArray(slots); + return js::HeapValueArray(elements); } inline const js::Value & JSObject::getDenseArrayElement(uintN idx) { JS_ASSERT(isDenseArray() && idx < getDenseArrayInitializedLength()); - return slots[idx]; + return elements[idx]; } inline void JSObject::setDenseArrayElement(uintN idx, const js::Value &val) { JS_ASSERT(isDenseArray() && idx < getDenseArrayInitializedLength()); - slots[idx] = val; + elements[idx] = val; } inline void JSObject::initDenseArrayElement(uintN idx, const js::Value &val) { JS_ASSERT(isDenseArray() && idx < getDenseArrayInitializedLength()); - slots[idx].init(val); + elements[idx].init(val); } inline void @@ -633,30 +632,27 @@ JSObject::initDenseArrayElementWithType(JSContext *cx, uintN idx, const js::Valu inline void JSObject::copyDenseArrayElements(uintN dstStart, const js::Value *src, uintN count) { - JS_ASSERT(isDenseArray()); - JS_ASSERT(dstStart + count <= capacity); - prepareSlotRangeForOverwrite(dstStart, dstStart + count); - memcpy(slots + dstStart, src, count * sizeof(js::Value)); + JS_ASSERT(dstStart + count <= getDenseArrayCapacity()); + prepareElementRangeForOverwrite(dstStart, dstStart + count); + memcpy(elements + dstStart, src, count * sizeof(js::Value)); } inline void JSObject::initDenseArrayElements(uintN dstStart, const js::Value *src, uintN count) { - JS_ASSERT(isDenseArray()); - JS_ASSERT(dstStart + count <= capacity); - memcpy(slots + dstStart, src, count * sizeof(js::Value)); + JS_ASSERT(dstStart + count <= getDenseArrayCapacity()); + memcpy(elements + dstStart, src, count * 
sizeof(js::Value)); } inline void JSObject::moveDenseArrayElements(uintN dstStart, uintN srcStart, uintN count) { - JS_ASSERT(isDenseArray()); - JS_ASSERT(dstStart + count <= capacity); - JS_ASSERT(srcStart + count <= capacity); + JS_ASSERT(dstStart + count <= getDenseArrayCapacity()); + JS_ASSERT(srcStart + count <= getDenseArrayCapacity()); /* * Use a custom write barrier here since it's performance sensitive. We - * only want to barrier the slots that are being overwritten. + * only want to barrier the elements that are being overwritten. */ uintN markStart, markEnd; if (dstStart > srcStart) { @@ -666,23 +662,16 @@ JSObject::moveDenseArrayElements(uintN dstStart, uintN srcStart, uintN count) markStart = dstStart; markEnd = js::Min(dstStart + count, srcStart); } - prepareSlotRangeForOverwrite(markStart, markEnd); + prepareElementRangeForOverwrite(markStart, markEnd); - memmove(slots + dstStart, slots + srcStart, count * sizeof(js::Value)); -} - -inline void -JSObject::shrinkDenseArrayElements(JSContext *cx, uintN cap) -{ - JS_ASSERT(isDenseArray()); - shrinkSlots(cx, cap); + memmove(elements + dstStart, elements + srcStart, count * sizeof(js::Value)); } inline bool JSObject::denseArrayHasInlineSlots() const { - JS_ASSERT(isDenseArray() && slots); - return slots == fixedSlots(); + JS_ASSERT(isDenseArray()); + return elements == fixedElements(); } namespace js { @@ -713,91 +702,6 @@ JSObject::setDateUTCTime(const js::Value &time) setFixedSlot(JSSLOT_DATE_UTC_TIME, time); } -inline js::FlatClosureData * -JSObject::getFlatClosureData() const -{ -#ifdef DEBUG - JSFunction *fun = getFunctionPrivate(); - JS_ASSERT(fun->isFlatClosure()); - JS_ASSERT(fun->script()->bindings.countUpvars() == fun->script()->upvars()->length); -#endif - return (js::FlatClosureData *) getFixedSlot(JSSLOT_FLAT_CLOSURE_UPVARS).toPrivate(); -} - -inline void -JSObject::finalizeUpvarsIfFlatClosure() -{ - /* - * Cloned function objects may be flat closures with upvars to free. - * - * We do not record in the closure objects any flags. Rather we use flags - * stored in the compiled JSFunction that we get via getFunctionPrivate() - * to distinguish between closure types. Then during finalization we must - * ensure that the compiled JSFunction always finalized after the closures - * so we can safely access it here. Currently the GC ensures that through - * finalizing JSFunction instances after finalizing any other objects even - * during the background finalization. - * - * But we must not access JSScript here that is stored in JSFunction. The - * script can be finalized before the function or closure instances. So we - * just check if JSSLOT_FLAT_CLOSURE_UPVARS holds a private value encoded - * as a double. We must also ignore newborn closures that do not have the - * private pointer set. - * - * FIXME bug 648320 - allocate upvars on the GC heap to avoid doing it - * here explicitly. 
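
The element copy and move helpers above always run prepareElementRangeForOverwrite() on exactly the range about to be clobbered before the raw memcpy/memmove. Here is a minimal model of that "prepare, then overwrite" pattern; MiniSlot and its logging barrier are invented for illustration and stand in for js::HeapValue's pre-write barrier.

    #include <cassert>
    #include <cstddef>
    #include <cstring>
    #include <vector>

    static std::vector<int> preBarrierLog;  // records values about to be clobbered

    struct MiniSlot {                       // stand-in for js::HeapValue
        int value;
        void preBarrier() { preBarrierLog.push_back(value); }
    };

    // Analogue of prepareElementRangeForOverwrite(): run the pre-write barrier
    // on every element in [start, end) before the raw memcpy below replaces it.
    static void prepareRangeForOverwrite(MiniSlot *elems, size_t start, size_t end) {
        for (size_t i = start; i < end; i++)
            elems[i].preBarrier();
    }

    static void copyElements(MiniSlot *elems, size_t dstStart,
                             const MiniSlot *src, size_t count) {
        prepareRangeForOverwrite(elems, dstStart, dstStart + count);
        memcpy(elems + dstStart, src, count * sizeof(MiniSlot));
    }

    int main() {
        MiniSlot elems[4] = { {10}, {11}, {12}, {13} };
        MiniSlot src[2] = { {20}, {21} };

        copyElements(elems, 1, src, 2);     // overwrites slots 1 and 2

        assert(elems[1].value == 20 && elems[2].value == 21);
        assert(preBarrierLog.size() == 2);  // barrier saw exactly the old 11 and 12
        assert(preBarrierLog[0] == 11 && preBarrierLog[1] == 12);
        return 0;
    }
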
- */ - JSFunction *fun = getFunctionPrivate(); - if (fun && fun != this && fun->isFlatClosure()) { - const js::Value &v = getSlot(JSSLOT_FLAT_CLOSURE_UPVARS); - if (v.isDouble()) - js::Foreground::free_(v.toPrivate()); - } -} - -inline js::Value -JSObject::getFlatClosureUpvar(uint32 i) const -{ - JS_ASSERT(i < getFunctionPrivate()->script()->bindings.countUpvars()); - return getFlatClosureData()->upvars[i]; -} - -inline const js::Value & -JSObject::getFlatClosureUpvar(uint32 i) -{ - JS_ASSERT(i < getFunctionPrivate()->script()->bindings.countUpvars()); - return getFlatClosureData()->upvars[i]; -} - -inline void -JSObject::setFlatClosureUpvar(uint32 i, const js::Value &v) -{ - JS_ASSERT(i < getFunctionPrivate()->script()->bindings.countUpvars()); - getFlatClosureData()->upvars[i] = v; -} - -inline void -JSObject::setFlatClosureData(js::FlatClosureData *data) -{ - JS_ASSERT(isFunction()); - JS_ASSERT(getFunctionPrivate()->isFlatClosure()); - setFixedSlot(JSSLOT_FLAT_CLOSURE_UPVARS, js::PrivateValue(data)); -} - -inline bool -JSObject::hasMethodObj(const JSObject& obj) const -{ - return JSSLOT_FUN_METHOD_OBJ < numSlots() && - getFixedSlot(JSSLOT_FUN_METHOD_OBJ).isObject() && - getFixedSlot(JSSLOT_FUN_METHOD_OBJ).toObject() == obj; -} - -inline void -JSObject::setMethodObj(JSObject& obj) -{ - setFixedSlot(JSSLOT_FUN_METHOD_OBJ, js::ObjectValue(obj)); -} - inline js::NativeIterator * JSObject::getNativeIterator() const { @@ -915,12 +819,15 @@ JSObject::setSingletonType(JSContext *cx) if (!cx->typeInferenceEnabled()) return true; - JS_ASSERT(!lastProp->previous()); + JS_ASSERT(!lastProperty()->previous()); JS_ASSERT(!hasLazyType()); JS_ASSERT_IF(getProto(), type() == getProto()->getNewType(cx, NULL)); - flags |= SINGLETON_TYPE | LAZY_TYPE; + js::types::TypeObject *type = cx->compartment->getLazyType(cx, getProto()); + if (!type) + return false; + type_ = type; return true; } @@ -932,38 +839,17 @@ JSObject::getType(JSContext *cx) return type_; } -inline js::types::TypeObject * -JSObject::getNewType(JSContext *cx, JSFunction *fun, bool markUnknown) -{ - if (isDenseArray() && !makeDenseArraySlow(cx)) - return NULL; - if (newType) { - /* - * If set, the newType's newScript indicates the script used to create - * all objects in existence which have this type. If there are objects - * in existence which are not created by calling 'new' on newScript, - * we must clear the new script information from the type and will not - * be able to assume any definite properties for instances of the type. - * This case is rare, but can happen if, for example, two scripted - * functions have the same value for their 'prototype' property, or if - * Object.create is called with a prototype object that is also the - * 'prototype' property of some scripted function. 
- */ - if (newType->newScript && newType->newScript->fun != fun) - newType->clearNewScript(cx); - if (markUnknown && cx->typeInferenceEnabled() && !newType->unknownProperties()) - newType->markUnknown(cx); - } else { - makeNewType(cx, fun, markUnknown); - } - return newType; -} - -inline void -JSObject::clearType() +inline bool +JSObject::clearType(JSContext *cx) { JS_ASSERT(!hasSingletonType()); - type_ = &js::types::emptyTypeObject; + + js::types::TypeObject *type = cx->compartment->getEmptyType(cx); + if (!type) + return false; + + type_ = type; + return true; } inline void @@ -974,97 +860,249 @@ JSObject::setType(js::types::TypeObject *newType) for (JSObject *obj = newType->proto; obj; obj = obj->getProto()) JS_ASSERT(obj != this); #endif - JS_ASSERT_IF(hasSpecialEquality(), newType->hasAnyFlags(js::types::OBJECT_FLAG_SPECIAL_EQUALITY)); + JS_ASSERT_IF(hasSpecialEquality(), + newType->hasAnyFlags(js::types::OBJECT_FLAG_SPECIAL_EQUALITY)); JS_ASSERT(!hasSingletonType()); type_ = newType; } -inline void -JSObject::earlyInit(jsuword capacity) +inline bool JSObject::setIteratedSingleton(JSContext *cx) { - this->capacity = capacity; + return setFlag(cx, js::BaseShape::ITERATED_SINGLETON); +} - /* Stops obj from being scanned until initializated. */ - lastProp.init(NULL); +inline bool JSObject::isSystem() const +{ + return lastProperty()->hasObjectFlag(js::BaseShape::SYSTEM); +} + +inline bool JSObject::setSystem(JSContext *cx) +{ + return setFlag(cx, js::BaseShape::SYSTEM); +} + +inline bool JSObject::isDelegate() const +{ + return lastProperty()->hasObjectFlag(js::BaseShape::DELEGATE); +} + +inline bool JSObject::setDelegate(JSContext *cx) +{ + return setFlag(cx, js::BaseShape::DELEGATE, GENERATE_SHAPE); +} + +inline bool JSObject::setIndexed(JSContext *cx) +{ + return setFlag(cx, js::BaseShape::INDEXED); +} + +inline bool JSObject::isVarObj() const +{ + return lastProperty()->hasObjectFlag(js::BaseShape::VAROBJ); +} + +inline bool JSObject::setVarObj(JSContext *cx) +{ + return setFlag(cx, js::BaseShape::VAROBJ); +} + +inline bool JSObject::setWatched(JSContext *cx) +{ + return setFlag(cx, js::BaseShape::WATCHED, GENERATE_SHAPE); +} + +inline bool JSObject::hasUncacheableProto() const +{ + return lastProperty()->hasObjectFlag(js::BaseShape::UNCACHEABLE_PROTO); +} + +inline bool JSObject::setUncacheableProto(JSContext *cx) +{ + return setFlag(cx, js::BaseShape::UNCACHEABLE_PROTO, GENERATE_SHAPE); +} + +inline bool JSObject::isExtensible() const +{ + return !lastProperty()->hasObjectFlag(js::BaseShape::NOT_EXTENSIBLE); +} + +inline bool JSObject::isBoundFunction() const +{ + return lastProperty()->hasObjectFlag(js::BaseShape::BOUND_FUNCTION); +} + +inline bool JSObject::isIndexed() const +{ + return lastProperty()->hasObjectFlag(js::BaseShape::INDEXED); +} + +inline bool JSObject::watched() const +{ + return lastProperty()->hasObjectFlag(js::BaseShape::WATCHED); +} + +inline bool JSObject::hasSpecialEquality() const +{ + return !!getClass()->ext.equality; +} + +inline bool JSObject::isArguments() const { return isNormalArguments() || isStrictArguments(); } +inline bool JSObject::isArrayBuffer() const { return hasClass(&js::ArrayBufferClass); } +inline bool JSObject::isNormalArguments() const { return hasClass(&js::NormalArgumentsObjectClass); } +inline bool JSObject::isStrictArguments() const { return hasClass(&js::StrictArgumentsObjectClass); } +inline bool JSObject::isNumber() const { return hasClass(&js::NumberClass); } +inline bool JSObject::isBoolean() const { return 
hasClass(&js::BooleanClass); } +inline bool JSObject::isString() const { return hasClass(&js::StringClass); } +inline bool JSObject::isPrimitive() const { return isNumber() || isString() || isBoolean(); } +inline bool JSObject::isDate() const { return hasClass(&js::DateClass); } +inline bool JSObject::isFunction() const { return hasClass(&js::FunctionClass); } +inline bool JSObject::isObject() const { return hasClass(&js::ObjectClass); } +inline bool JSObject::isWith() const { return hasClass(&js::WithClass); } +inline bool JSObject::isBlock() const { return hasClass(&js::BlockClass); } +inline bool JSObject::isStaticBlock() const { return isBlock() && !getProto(); } +inline bool JSObject::isClonedBlock() const { return isBlock() && !!getProto(); } +inline bool JSObject::isCall() const { return hasClass(&js::CallClass); } +inline bool JSObject::isDeclEnv() const { return hasClass(&js::DeclEnvClass); } +inline bool JSObject::isRegExp() const { return hasClass(&js::RegExpClass); } +inline bool JSObject::isScript() const { return hasClass(&js::ScriptClass); } +inline bool JSObject::isGenerator() const { return hasClass(&js::GeneratorClass); } +inline bool JSObject::isIterator() const { return hasClass(&js::IteratorClass); } +inline bool JSObject::isStopIteration() const { return hasClass(&js::StopIterationClass); } +inline bool JSObject::isError() const { return hasClass(&js::ErrorClass); } +inline bool JSObject::isXML() const { return hasClass(&js::XMLClass); } +inline bool JSObject::isNamespace() const { return hasClass(&js::NamespaceClass); } +inline bool JSObject::isWeakMap() const { return hasClass(&js::WeakMapClass); } +inline bool JSObject::isFunctionProxy() const { return hasClass(&js::FunctionProxyClass); } + +inline bool JSObject::isArray() const +{ + return isSlowArray() || isDenseArray(); +} + +inline bool JSObject::isDenseArray() const +{ + bool result = hasClass(&js::ArrayClass); + JS_ASSERT_IF(result, elements != js::emptyObjectElements); + return result; +} + +inline bool JSObject::isSlowArray() const +{ + bool result = hasClass(&js::SlowArrayClass); + JS_ASSERT_IF(result, elements != js::emptyObjectElements); + return result; +} + +inline bool +JSObject::isXMLId() const +{ + return hasClass(&js::QNameClass) + || hasClass(&js::AttributeNameClass) + || hasClass(&js::AnyNameClass); +} + +inline bool +JSObject::isQName() const +{ + return hasClass(&js::QNameClass) + || hasClass(&js::AttributeNameClass) + || hasClass(&js::AnyNameClass); } inline void -JSObject::initType(js::types::TypeObject *newType) +JSObject::initializeSlotRange(size_t start, size_t length) { -#ifdef DEBUG - JS_ASSERT(newType); - for (JSObject *obj = newType->proto; obj; obj = obj->getProto()) - JS_ASSERT(obj != this); -#endif - JS_ASSERT_IF(hasSpecialEquality(), newType->hasAnyFlags(js::types::OBJECT_FLAG_SPECIAL_EQUALITY)); - JS_ASSERT(!hasSingletonType()); - type_.init(newType); -} - -inline void -JSObject::init(JSContext *cx, js::Class *aclasp, js::types::TypeObject *type, - JSObject *parent, void *priv, bool denseArray) -{ - clasp = aclasp; - flags = capacity << FIXED_SLOTS_SHIFT; - - JS_ASSERT(denseArray == (aclasp == &js::ArrayClass)); - -#ifdef DEBUG /* - * NB: objShape must not be set here; rather, the caller must call setMap - * or setSharedNonNativeMap after calling init. To defend this requirement - * we set objShape to a value that obj->shape() is asserted never to return. + * No bounds check, as this is used when the object's shape does not + * reflect its allocated slots (updateSlotsForSpan). 
*/ - objShape = INVALID_SHAPE; -#endif - - privateData = priv; - - /* - * Fill the fixed slots with undefined if needed. This object must - * already have its capacity filled in, as by js_NewGCObject. If inference - * is disabled, NewArray will backfill holes up to the array's capacity - * and unset the PACKED_ARRAY flag. - */ - slots = NULL; - if (denseArray) { - slots = fixedSlots(); - flags |= PACKED_ARRAY; + JS_ASSERT(!isDenseArray()); + size_t fixed = numFixedSlots(); + if (start < fixed) { + if (start + length < fixed) { + js::InitValueRange(fixedSlots() + start, length, false); + } else { + size_t localClear = fixed - start; + js::InitValueRange(fixedSlots() + start, localClear, false); + js::InitValueRange(slots, length - localClear, false); + } } else { - js::InitValueRange(fixedSlots(), capacity, denseArray); + js::InitValueRange(slots + start - fixed, length, false); } +} - newType.init(NULL); - initType(type); - initParent(parent); +/* static */ inline JSObject * +JSObject::create(JSContext *cx, js::gc::AllocKind kind, + js::Shape *shape, js::types::TypeObject *type, js::HeapValue *slots) +{ + /* + * Callers must use dynamicSlotsCount to size the initial slot array of the + * object. We can't check the allocated capacity of the dynamic slots, but + * make sure their presence is consistent with the shape. + */ + JS_ASSERT(shape && type); + JS_ASSERT(!!dynamicSlotsCount(shape->numFixedSlots(), shape->slotSpan()) == !!slots); + JS_ASSERT(js::gc::GetGCKindSlots(kind, shape->getObjectClass()) == shape->numFixedSlots()); + + JSObject *obj = js_NewGCObject(cx, kind); + if (!obj) + return NULL; + + obj->shape_.init(shape); + obj->type_.init(type); + obj->slots = slots; + obj->elements = js::emptyObjectElements; + + if (shape->getObjectClass()->hasPrivate()) + obj->privateRef(shape->numFixedSlots()) = NULL; + + if (size_t span = shape->slotSpan()) + obj->initializeSlotRange(0, span); + + return obj; +} + +/* static */ inline JSObject * +JSObject::createDenseArray(JSContext *cx, js::gc::AllocKind kind, + js::Shape *shape, js::types::TypeObject *type, uint32 length) +{ + JS_ASSERT(shape && type); + JS_ASSERT(shape->getObjectClass() == &js::ArrayClass); + + /* + * Dense arrays are non-native, and never have properties to store. + * The number of fixed slots in the shape of such objects is zero. + */ + JS_ASSERT(shape->numFixedSlots() == 0); + + /* + * The array initially stores its elements inline, there must be enough + * space for an elements header. 
+ */ + JS_ASSERT(js::gc::GetGCKindSlots(kind) >= js::ObjectElements::VALUES_PER_HEADER); + + uint32 capacity = js::gc::GetGCKindSlots(kind) - js::ObjectElements::VALUES_PER_HEADER; + + JSObject *obj = js_NewGCObject(cx, kind); + if (!obj) + return NULL; + + obj->shape_.init(shape); + obj->type_.init(type); + obj->slots = NULL; + obj->setFixedElements(); + new (obj->getElementsHeader()) js::ObjectElements(capacity, length); + + return obj; } inline void JSObject::finish(JSContext *cx) { - if (hasSlotsArray()) + if (hasDynamicSlots()) cx->free_(slots); -} - -inline bool -JSObject::initSharingEmptyShape(JSContext *cx, - js::Class *aclasp, - js::types::TypeObject *type, - JSObject *parent, - void *privateValue, - js::gc::AllocKind kind) -{ - init(cx, aclasp, type, parent, privateValue, false); - - JS_ASSERT(!isDenseArray()); - - js::EmptyShape *empty = type->getEmptyShape(cx, aclasp, kind); - if (!empty) - return false; - - initMap(empty); - return true; + if (hasDynamicElements()) + cx->free_(getElementsHeader()); } inline bool @@ -1097,7 +1135,9 @@ JSObject::principals(JSContext *cx) inline uint32 JSObject::slotSpan() const { - return lastProp->slotSpan; + if (inDictionaryMode()) + return lastProperty()->base()->slotSpan(); + return lastProperty()->slotSpan(); } inline bool @@ -1106,22 +1146,6 @@ JSObject::containsSlot(uint32 slot) const return slot < slotSpan(); } -inline void -JSObject::setMap(js::Shape *amap) -{ - JS_ASSERT(!hasOwnShape()); - lastProp = amap; - objShape = lastProp->shapeid; -} - -inline void -JSObject::initMap(js::Shape *amap) -{ - JS_ASSERT(!hasOwnShape()); - lastProp.init(amap); - objShape = lastProp->shapeid; -} - inline js::HeapValue & JSObject::nativeGetSlotRef(uintN slot) { @@ -1138,6 +1162,22 @@ JSObject::nativeGetSlot(uintN slot) const return getSlot(slot); } +inline JSFunction * +JSObject::nativeGetMethod(const js::Shape *shape) const +{ + /* + * For method shapes, this object must have an uncloned function object in + * the shape's slot. 
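// [Editor's aside: illustrative sketch, not part of the patch.] The assertions in
// JSObject::create() above imply its calling convention: the caller sizes the dynamic
// slot array from the shape (via dynamicSlotsCount(), or the helper
// PreallocateObjectDynamicSlots() added further down in this file) and hands that
// array to create(). ExampleCreateFromShape is a hypothetical name:
static JSObject *
ExampleCreateFromShape(JSContext *cx, js::gc::AllocKind kind,
                       js::Shape *shape, js::types::TypeObject *type)
{
    js::HeapValue *slots;
    if (!js::PreallocateObjectDynamicSlots(cx, shape, &slots))
        return NULL;                   // OOM while sizing the dynamic slots
    // What to do with `slots` if create() itself fails is decided by the patch's
    // real callers and is not shown here.
    return JSObject::create(cx, kind, shape, type, slots);
}
// [End editor's aside.]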
+ */ + JS_ASSERT(shape->isMethod()); +#ifdef DEBUG + JSObject *obj = &nativeGetSlot(shape->slot()).toObject(); + JS_ASSERT(obj->isFunction() && !obj->toFunction()->isClonedMethod()); +#endif + + return static_cast(&nativeGetSlot(shape->slot()).toObject()); +} + inline void JSObject::nativeSetSlot(uintN slot, const js::Value &value) { @@ -1149,40 +1189,20 @@ JSObject::nativeSetSlot(uintN slot, const js::Value &value) inline void JSObject::nativeSetSlotWithType(JSContext *cx, const js::Shape *shape, const js::Value &value) { - nativeSetSlot(shape->slot, value); - js::types::AddTypePropertyId(cx, this, shape->propid, value); + nativeSetSlot(shape->slot(), value); + js::types::AddTypePropertyId(cx, this, shape->propid(), value); } inline bool JSObject::isNative() const { - return lastProp->isNative(); -} - -inline bool -JSObject::isNewborn() const -{ - return !lastProp; -} - -inline void -JSObject::clearOwnShape() -{ - flags &= ~OWN_SHAPE; - objShape = lastProp->shapeid; -} - -inline void -JSObject::setOwnShape(uint32 s) -{ - flags |= OWN_SHAPE; - objShape = s; + return lastProperty()->isNative(); } inline js::Shape ** JSObject::nativeSearch(JSContext *cx, jsid id, bool adding) { - return js::Shape::search(cx, &lastProp, id, adding); + return js::Shape::search(cx, &shape_, id, adding); } inline const js::Shape * @@ -1201,15 +1221,7 @@ JSObject::nativeContains(JSContext *cx, jsid id) inline bool JSObject::nativeContains(JSContext *cx, const js::Shape &shape) { - return nativeLookup(cx, shape.propid) == &shape; -} - -inline const js::Shape * -JSObject::lastProperty() const -{ - JS_ASSERT(isNative()); - JS_ASSERT(!JSID_IS_VOID(lastProp->propid)); - return lastProp; + return nativeLookup(cx, shape.propid()) == &shape; } inline bool @@ -1236,33 +1248,33 @@ JSObject::hasPropertyTable() const return lastProperty()->hasTable(); } -/* - * FIXME: shape must not be null, should use a reference here and other places. - */ -inline void -JSObject::setLastProperty(const js::Shape *shape) +inline size_t +JSObject::structSize() const { - JS_ASSERT(!inDictionaryMode()); - JS_ASSERT(!JSID_IS_VOID(shape->propid)); - JS_ASSERT_IF(lastProp, !JSID_IS_VOID(lastProp->propid)); - JS_ASSERT(shape->compartment() == compartment()); - - lastProp = const_cast(shape); + return arenaHeader()->getThingSize(); } -inline void -JSObject::removeLastProperty() +inline size_t +JSObject::slotsAndStructSize() const { - JS_ASSERT(!inDictionaryMode()); - JS_ASSERT(!JSID_IS_VOID(lastProp->parent->propid)); - - lastProp = lastProp->parent; + return structSize() + dynamicSlotSize(NULL); } -inline void -JSObject::setSharedNonNativeMap() +inline size_t +JSObject::dynamicSlotSize(JSMallocSizeOfFun mallocSizeOf) const { - setMap(&js::Shape::sharedNonNative); + size_t size = 0; + if (hasDynamicSlots()) { + size_t bytes = numDynamicSlots() * sizeof(js::Value); + size += mallocSizeOf ? mallocSizeOf(slots, bytes) : bytes; + } + if (hasDynamicElements()) { + size_t bytes = + (js::ObjectElements::VALUES_PER_HEADER + + getElementsHeader()->capacity) * sizeof(js::Value); + size += mallocSizeOf ? 
mallocSizeOf(getElementsHeader(), bytes) : bytes; + } + return size; } inline JSBool @@ -1431,6 +1443,15 @@ JSObject::isWrapper() const return js::IsWrapper(this); } +inline js::GlobalObject * +JSObject::getGlobal() const +{ + JSObject *obj = const_cast(this); + while (JSObject *parent = obj->getParent()) + obj = parent; + return obj->asGlobal(); +} + static inline bool js_IsCallable(const js::Value &v) { @@ -1446,6 +1467,69 @@ js_UnwrapWithObject(JSContext *cx, JSObject *withobj) namespace js { +inline void +OBJ_TO_INNER_OBJECT(JSContext *cx, JSObject *&obj) +{ + if (JSObjectOp op = obj->getClass()->ext.innerObject) + obj = op(cx, obj); +} + +inline void +OBJ_TO_OUTER_OBJECT(JSContext *cx, JSObject *&obj) +{ + if (JSObjectOp op = obj->getClass()->ext.outerObject) + obj = op(cx, obj); +} + +/* + * Methods to test whether an object or a value is of type "xml" (per typeof). + */ + +#define VALUE_IS_XML(v) (!JSVAL_IS_PRIMITIVE(v) && JSVAL_TO_OBJECT(v)->isXML()) + +static inline bool +IsXML(const js::Value &v) +{ + return v.isObject() && v.toObject().isXML(); +} + +static inline bool +IsStopIteration(const js::Value &v) +{ + return v.isObject() && v.toObject().isStopIteration(); +} + +/* ES5 9.1 ToPrimitive(input). */ +static JS_ALWAYS_INLINE bool +ToPrimitive(JSContext *cx, Value *vp) +{ + if (vp->isPrimitive()) + return true; + return vp->toObject().defaultValue(cx, JSTYPE_VOID, vp); +} + +/* ES5 9.1 ToPrimitive(input, PreferredType). */ +static JS_ALWAYS_INLINE bool +ToPrimitive(JSContext *cx, JSType preferredType, Value *vp) +{ + JS_ASSERT(preferredType != JSTYPE_VOID); /* Use the other ToPrimitive! */ + if (vp->isPrimitive()) + return true; + return vp->toObject().defaultValue(cx, preferredType, vp); +} + +/* + * Return true if this is a compiler-created internal function accessed by + * its own object. Such a function object must not be accessible to script + * or embedding code. + */ +inline bool +IsInternalFunctionObject(JSObject *funobj) +{ + JSFunction *fun = funobj->toFunction(); + return (fun->flags & JSFUN_LAMBDA) && !funobj->getParent(); +} + class AutoPropDescArrayRooter : private AutoGCRooter { public: @@ -1494,31 +1578,100 @@ class AutoPropertyDescriptorRooter : private AutoGCRooter, public PropertyDescri friend void AutoGCRooter::trace(JSTracer *trc); }; -static inline js::EmptyShape * -InitScopeForObject(JSContext* cx, JSObject* obj, js::Class *clasp, js::types::TypeObject *type, - gc::AllocKind kind) +inline bool +NewObjectCache::lookup(Class *clasp, gc::Cell *key, gc::AllocKind kind, EntryIndex *pentry) { - JS_ASSERT(clasp->isNative()); + jsuword hash = (jsuword(clasp) ^ jsuword(key)) + kind; + *pentry = hash % js::ArrayLength(entries); - /* Share proto's emptyShape only if obj is similar to proto. */ - js::EmptyShape *empty = NULL; + Entry *entry = &entries[*pentry]; - uint32 freeslot = JSSLOT_FREE(clasp); - if (freeslot > obj->numSlots() && !obj->allocSlots(cx, freeslot)) - goto bad; + /* N.B. Lookups with the same clasp/key but different kinds map to different entries. 
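// [Editor's aside: illustrative sketch, not part of the patch.] Typical use of the
// ES5 ToPrimitive() helpers defined earlier in this hunk: primitive values pass
// through untouched, objects are converted via their defaultValue hook, optionally
// with a type hint. The helper name below is hypothetical:
static bool ExampleToPrimitiveWithNumberHint(JSContext *cx, js::Value *vp)
{
    return js::ToPrimitive(cx, JSTYPE_NUMBER, vp);  // a hint-free overload also exists
}
// [End editor's aside.]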
*/ + return (entry->clasp == clasp && entry->key == key); +} - if (type->canProvideEmptyShape(clasp)) - empty = type->getEmptyShape(cx, clasp, kind); - else - empty = js::EmptyShape::create(cx, clasp); - if (!empty) - goto bad; +inline bool +NewObjectCache::lookupProto(Class *clasp, JSObject *proto, gc::AllocKind kind, EntryIndex *pentry) +{ + JS_ASSERT(!proto->isGlobal()); + return lookup(clasp, proto, kind, pentry); +} - return empty; +inline bool +NewObjectCache::lookupGlobal(Class *clasp, js::GlobalObject *global, gc::AllocKind kind, EntryIndex *pentry) +{ + return lookup(clasp, global, kind, pentry); +} + +inline bool +NewObjectCache::lookupType(Class *clasp, js::types::TypeObject *type, gc::AllocKind kind, EntryIndex *pentry) +{ + return lookup(clasp, type, kind, pentry); +} + +inline void +NewObjectCache::fill(EntryIndex entry_, Class *clasp, gc::Cell *key, gc::AllocKind kind, JSObject *obj) +{ + JS_ASSERT(unsigned(entry_) < ArrayLength(entries)); + Entry *entry = &entries[entry_]; + + JS_ASSERT(!obj->hasDynamicSlots() && !obj->hasDynamicElements()); + + entry->clasp = clasp; + entry->key = key; + entry->kind = kind; + + entry->nbytes = obj->structSize(); + memcpy(&entry->templateObject, obj, entry->nbytes); +} + +inline void +NewObjectCache::fillProto(EntryIndex entry, Class *clasp, JSObject *proto, gc::AllocKind kind, JSObject *obj) +{ + JS_ASSERT(!proto->isGlobal()); + JS_ASSERT(obj->getProto() == proto); + return fill(entry, clasp, proto, kind, obj); +} + +inline void +NewObjectCache::fillGlobal(EntryIndex entry, Class *clasp, js::GlobalObject *global, gc::AllocKind kind, JSObject *obj) +{ + //JS_ASSERT(global == obj->getGlobal()); + return fill(entry, clasp, global, kind, obj); +} + +inline void +NewObjectCache::fillType(EntryIndex entry, Class *clasp, js::types::TypeObject *type, gc::AllocKind kind, JSObject *obj) +{ + JS_ASSERT(obj->type() == type); + return fill(entry, clasp, type, kind, obj); +} + +inline JSObject * +NewObjectCache::newObjectFromHit(JSContext *cx, EntryIndex entry_) +{ + JS_ASSERT(unsigned(entry_) < ArrayLength(entries)); + Entry *entry = &entries[entry_]; + + JSObject *obj = js_TryNewGCObject(cx, entry->kind); + if (obj) { + memcpy(obj, &entry->templateObject, entry->nbytes); + Probes::createObject(cx, obj); + return obj; + } + + /* Copy the entry to the stack first in case it is purged by a GC. */ + size_t nbytes = entry->nbytes; + JSObject_Slots16 stackObject; + memcpy(&stackObject, &entry->templateObject, nbytes); + + obj = js_NewGCObject(cx, entry->kind); + if (obj) { + memcpy(obj, &stackObject, nbytes); + Probes::createObject(cx, obj); + return obj; + } - bad: - /* The GC nulls map initially. It should still be null on error. */ - JS_ASSERT(obj->isNewborn()); return NULL; } @@ -1534,66 +1687,92 @@ CanBeFinalizedInBackground(gc::AllocKind kind, Class *clasp) * IsBackgroundAllocKind is called to prevent recursively incrementing * the finalize kind; kind may already be a background finalize kind. */ - if (!gc::IsBackgroundAllocKind(kind) && - (!clasp->finalize || clasp->flags & JSCLASS_CONCURRENT_FINALIZER)) { + if (!gc::IsBackgroundAllocKind(kind) && !clasp->finalize) return true; - } #endif return false; } /* - * Helper optimized for creating a native instance of the given class (not the - * class's prototype object). Use this in preference to NewObject, but use - * NewBuiltinClassInstance if you need the default class prototype as proto, - * and its parent global as parent. + * Make an object with the specified prototype. 
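// [Editor's aside: illustrative sketch, not part of the patch.] The intended
// lookup/fill protocol for the NewObjectCache above. Where the cache instance lives
// (compartment vs. runtime) is not shown in this hunk, so it is passed in here; the
// proto key must not be a global (lookupProto/fillProto assert this), and
// NewObjectWithGivenProto is declared just below. Function name is hypothetical:
static JSObject *
ExampleCachedNewObject(JSContext *cx, js::NewObjectCache &cache, js::Class *clasp,
                       JSObject *proto, js::gc::AllocKind kind)
{
    js::NewObjectCache::EntryIndex entry;
    if (cache.lookupProto(clasp, proto, kind, &entry)) {
        // Hit: clone a fresh GC thing from the cached template object.
        if (JSObject *obj = cache.newObjectFromHit(cx, entry))
            return obj;
    }
    // Miss (or allocation failure on the hit path): create the object normally,
    // then cache it if it is cacheable (fixed slots and elements only).
    JSObject *obj = js::NewObjectWithGivenProto(cx, clasp, proto, NULL, kind);
    if (obj && !obj->hasDynamicSlots() && !obj->hasDynamicElements())
        cache.fillProto(entry, clasp, proto, kind, obj);
    return obj;
}
// [End editor's aside.]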
If parent is null, it will + * default to the prototype's global if the prototype is non-null. */ -static inline JSObject * -NewNativeClassInstance(JSContext *cx, Class *clasp, JSObject *proto, - JSObject *parent, gc::AllocKind kind) +JSObject * +NewObjectWithGivenProto(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent, + gc::AllocKind kind); + +inline JSObject * +NewObjectWithGivenProto(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent) { - JS_ASSERT(proto); - JS_ASSERT(parent); - JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST); - - types::TypeObject *type = proto->getNewType(cx); - if (!type) - return NULL; - - /* - * Allocate an object from the GC heap and initialize all its fields before - * doing any operation that can potentially trigger GC. - */ - - if (CanBeFinalizedInBackground(kind, clasp)) - kind = GetBackgroundAllocKind(kind); - - JSObject* obj = js_NewGCObject(cx, kind); - - if (obj) { - /* - * Default parent to the parent of the prototype, which was set from - * the parent of the prototype's constructor. - */ - bool denseArray = (clasp == &ArrayClass); - obj->init(cx, clasp, type, parent, NULL, denseArray); - - JS_ASSERT(type->canProvideEmptyShape(clasp)); - js::EmptyShape *empty = type->getEmptyShape(cx, clasp, kind); - if (empty) - obj->initMap(empty); - else - obj = NULL; - } - - return obj; + gc::AllocKind kind = gc::GetGCObjectKind(clasp); + return NewObjectWithGivenProto(cx, clasp, proto, parent, kind); } -static inline JSObject * -NewNativeClassInstance(JSContext *cx, Class *clasp, JSObject *proto, JSObject *parent) +inline JSProtoKey +GetClassProtoKey(js::Class *clasp) { - gc::AllocKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp)); - return NewNativeClassInstance(cx, clasp, proto, parent, kind); + JSProtoKey key = JSCLASS_CACHED_PROTO_KEY(clasp); + if (key != JSProto_Null) + return key; + if (clasp->flags & JSCLASS_IS_ANONYMOUS) + return JSProto_Object; + return JSProto_Null; +} + +inline bool +FindProto(JSContext *cx, js::Class *clasp, JSObject *parent, JSObject ** proto) +{ + JSProtoKey protoKey = GetClassProtoKey(clasp); + if (!js_GetClassPrototype(cx, parent, protoKey, proto, clasp)) + return false; + if (!(*proto) && !js_GetClassPrototype(cx, parent, JSProto_Object, proto)) + return false; + return true; +} + +/* + * Make an object with the prototype set according to the specified prototype or class: + * + * if proto is non-null: + * use the specified proto + * for a built-in class: + * use the memoized original value of the class constructor .prototype + * property object + * else if available + * the current value of .prototype + * else + * Object.prototype. + * + * The class prototype will be fetched from the parent's global. If global is + * null, the context's active global will be used, and the resulting object's + * parent will be that global. + */ +JSObject * +NewObjectWithClassProto(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent, + gc::AllocKind kind); + +inline JSObject * +NewObjectWithClassProto(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent) +{ + gc::AllocKind kind = gc::GetGCObjectKind(clasp); + return NewObjectWithClassProto(cx, clasp, proto, parent, kind); +} + +/* + * Create a native instance of the given class with parent and proto set + * according to the context's active global. 
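// [Editor's aside: illustrative sketch, not part of the patch.] For the common case,
// NewObjectWithClassProto(cx, clasp, NULL, parent) behaves roughly like resolving the
// class prototype by hand with FindProto() and then calling NewObjectWithGivenProto(),
// both declared above. The helper name below is hypothetical:
static JSObject *
ExampleNewForClass(JSContext *cx, js::Class *clasp, JSObject *parent)
{
    JSObject *proto;
    if (!js::FindProto(cx, clasp, parent, &proto))
        return NULL;
    return js::NewObjectWithGivenProto(cx, clasp, proto, parent);
}
// [End editor's aside.]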
+ */ +inline JSObject * +NewBuiltinClassInstance(JSContext *cx, Class *clasp, gc::AllocKind kind) +{ + return NewObjectWithClassProto(cx, clasp, NULL, NULL, kind); +} + +inline JSObject * +NewBuiltinClassInstance(JSContext *cx, Class *clasp) +{ + gc::AllocKind kind = gc::GetGCObjectKind(clasp); + return NewBuiltinClassInstance(cx, clasp, kind); } inline GlobalObject * @@ -1607,256 +1786,39 @@ bool FindClassPrototype(JSContext *cx, JSObject *scope, JSProtoKey protoKey, JSObject **protop, Class *clasp); -/* - * Helper used to create Boolean, Date, RegExp, etc. instances of built-in - * classes with class prototypes of the same Class. See, e.g., jsdate.cpp, - * jsregexp.cpp, and js_PrimitiveToObject in jsobj.cpp. Use this to get the - * right default proto and parent for clasp in cx. - */ -static inline JSObject * -NewBuiltinClassInstance(JSContext *cx, Class *clasp, gc::AllocKind kind) -{ - VOUCH_DOES_NOT_REQUIRE_STACK(); - - JSProtoKey protoKey = JSCLASS_CACHED_PROTO_KEY(clasp); - JS_ASSERT(protoKey != JSProto_Null); - - /* NB: inline-expanded and specialized version of js_GetClassPrototype. */ - JSObject *global; - if (!cx->hasfp()) { - global = JS_ObjectToInnerObject(cx, cx->globalObject); - if (!global) - return NULL; - } else { - global = cx->fp()->scopeChain().getGlobal(); - } - JS_ASSERT(global->isGlobal()); - - const Value &v = global->getReservedSlot(JSProto_LIMIT + protoKey); - JSObject *proto; - if (v.isObject()) { - proto = &v.toObject(); - JS_ASSERT(proto->getParent() == global); - } else { - if (!FindClassPrototype(cx, global, protoKey, &proto, clasp)) - return NULL; - } - - return NewNativeClassInstance(cx, clasp, proto, global, kind); -} - -static inline JSObject * -NewBuiltinClassInstance(JSContext *cx, Class *clasp) -{ - gc::AllocKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp)); - return NewBuiltinClassInstance(cx, clasp, kind); -} - -static inline JSProtoKey -GetClassProtoKey(js::Class *clasp) -{ - JSProtoKey key = JSCLASS_CACHED_PROTO_KEY(clasp); - if (key != JSProto_Null) - return key; - if (clasp->flags & JSCLASS_IS_ANONYMOUS) - return JSProto_Object; - return JSProto_Null; -} - -namespace WithProto { - enum e { - Class = 0, - Given = 1 - }; -} - -/* - * Create an instance of any class, native or not, JSFunction-sized or not. - * - * If withProto is 'Class': - * If proto is null: - * for a built-in class: - * use the memoized original value of the class constructor .prototype - * property object - * else if available - * the current value of .prototype - * else - * Object.prototype. - * - * If parent is null, default it to proto->getParent() if proto is non - * null, else to null. - * - * If withProto is 'Given': - * We allocate an object with exactly the given proto. A null parent - * defaults to proto->getParent() if proto is non-null (else to null). - * - * If isFunction is true, return a JSFunction-sized object. If isFunction is - * false, return a normal object. - * - * Note that as a template, there will be lots of instantiations, which means - * the internals will be specialized based on the template parameters. 
- */ -static JS_ALWAYS_INLINE bool -FindProto(JSContext *cx, js::Class *clasp, JSObject *parent, JSObject ** proto) -{ - JSProtoKey protoKey = GetClassProtoKey(clasp); - if (!js_GetClassPrototype(cx, parent, protoKey, proto, clasp)) - return false; - if (!(*proto) && !js_GetClassPrototype(cx, parent, JSProto_Object, proto)) - return false; - - return true; -} - -namespace detail -{ -template -static JS_ALWAYS_INLINE JSObject * -NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent, - gc::AllocKind kind) -{ - /* Bootstrap the ur-object, and make it the default prototype object. */ - if (withProto == WithProto::Class && !proto) { - if (!FindProto(cx, clasp, parent, &proto)) - return NULL; - } - - types::TypeObject *type = proto ? proto->getNewType(cx) : &js::types::emptyTypeObject; - if (!type) - return NULL; - - /* - * Allocate an object from the GC heap and initialize all its fields before - * doing any operation that can potentially trigger GC. Functions have a - * larger non-standard allocation size. - * - * The should be specialized by the template. - */ - - if (!isFunction && CanBeFinalizedInBackground(kind, clasp)) - kind = GetBackgroundAllocKind(kind); - - JSObject* obj = isFunction ? js_NewGCFunction(cx) : js_NewGCObject(cx, kind); - if (!obj) - goto out; - - /* This needs to match up with the size of JSFunction::data_padding. */ - JS_ASSERT_IF(isFunction, kind == gc::FINALIZE_OBJECT2); - - /* - * Default parent to the parent of the prototype, which was set from - * the parent of the prototype's constructor. - */ - obj->init(cx, clasp, type, - (!parent && proto) ? proto->getParent() : parent, - NULL, clasp == &ArrayClass); - - if (clasp->isNative()) { - js::EmptyShape *empty = InitScopeForObject(cx, obj, clasp, type, kind); - if (!empty) { - obj = NULL; - goto out; - } - obj->initMap(empty); - } else { - obj->setSharedNonNativeMap(); - } - -out: - Probes::createObject(cx, obj); - return obj; -} -} /* namespace detail */ - -static JS_ALWAYS_INLINE JSObject * -NewFunction(JSContext *cx, js::GlobalObject &global) -{ - JSObject *proto; - if (!js_GetClassPrototype(cx, &global, JSProto_Function, &proto)) - return NULL; - return detail::NewObject(cx, &FunctionClass, proto, &global, - gc::FINALIZE_OBJECT2); -} - -static JS_ALWAYS_INLINE JSObject * -NewFunction(JSContext *cx, JSObject *parent) -{ - return detail::NewObject(cx, &FunctionClass, NULL, parent, - gc::FINALIZE_OBJECT2); -} - -template -static JS_ALWAYS_INLINE JSObject * -NewNonFunction(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent, - gc::AllocKind kind) -{ - return detail::NewObject(cx, clasp, proto, parent, kind); -} - -template -static JS_ALWAYS_INLINE JSObject * -NewNonFunction(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent) -{ - gc::AllocKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp)); - return detail::NewObject(cx, clasp, proto, parent, kind); -} - -template -static JS_ALWAYS_INLINE JSObject * -NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent, - gc::AllocKind kind) -{ - if (clasp == &FunctionClass) - return detail::NewObject(cx, clasp, proto, parent, kind); - return detail::NewObject(cx, clasp, proto, parent, kind); -} - -template -static JS_ALWAYS_INLINE JSObject * -NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent) -{ - gc::AllocKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp)); - return NewObject(cx, clasp, proto, parent, kind); -} - /* * Create a plain object with the specified 
type. This bypasses getNewType to * avoid losing creation site information for objects made by scripted 'new'. */ -static JS_ALWAYS_INLINE JSObject * -NewObjectWithType(JSContext *cx, types::TypeObject *type, JSObject *parent, gc::AllocKind kind) +JSObject * +NewObjectWithType(JSContext *cx, types::TypeObject *type, JSObject *parent, gc::AllocKind kind); + +/* Make an object with pregenerated shape from a NEWOBJECT bytecode. */ +static inline JSObject * +CopyInitializerObject(JSContext *cx, JSObject *baseobj, types::TypeObject *type) { - JS_ASSERT(type == type->proto->newType); + JS_ASSERT(baseobj->getClass() == &ObjectClass); + JS_ASSERT(!baseobj->inDictionaryMode()); - if (CanBeFinalizedInBackground(kind, &ObjectClass)) - kind = GetBackgroundAllocKind(kind); + gc::AllocKind kind = gc::GetGCObjectFixedSlotsKind(baseobj->numFixedSlots()); +#ifdef JS_THREADSAFE + kind = gc::GetBackgroundAllocKind(kind); +#endif + JS_ASSERT(kind == baseobj->getAllocKind()); + JSObject *obj = NewBuiltinClassInstance(cx, &ObjectClass, kind); - JSObject* obj = js_NewGCObject(cx, kind); if (!obj) - goto out; + return NULL; - /* - * Default parent to the parent of the prototype, which was set from - * the parent of the prototype's constructor. - */ - obj->init(cx, &ObjectClass, type, - (!parent && type->proto) ? type->proto->getParent() : parent, - NULL, false); + obj->setType(type); - js::EmptyShape *empty; - empty = InitScopeForObject(cx, obj, &ObjectClass, type, kind); - if (!empty) { - obj = NULL; - goto out; - } - obj->initMap(empty); + if (!obj->setLastProperty(cx, baseobj->lastProperty())) + return NULL; -out: - Probes::createObject(cx, obj); return obj; } -extern JSObject * +JSObject * NewReshapedObject(JSContext *cx, js::types::TypeObject *type, JSObject *parent, gc::AllocKind kind, const Shape *shape); @@ -1866,11 +1828,19 @@ NewReshapedObject(JSContext *cx, js::types::TypeObject *type, JSObject *parent, * objects that do not require any fixed slots. */ static inline gc::AllocKind -GuessObjectGCKind(size_t numSlots, bool isArray) +GuessObjectGCKind(size_t numSlots) { if (numSlots) - return gc::GetGCObjectKind(numSlots, isArray); - return isArray ? gc::FINALIZE_OBJECT8 : gc::FINALIZE_OBJECT4; + return gc::GetGCObjectKind(numSlots); + return gc::FINALIZE_OBJECT4; +} + +static inline gc::AllocKind +GuessArrayGCKind(size_t numSlots) +{ + if (numSlots) + return gc::GetGCArrayKind(numSlots); + return gc::FINALIZE_OBJECT8; } /* @@ -1887,51 +1857,22 @@ NewObjectGCKind(JSContext *cx, js::Class *clasp) return gc::FINALIZE_OBJECT4; } -static JS_ALWAYS_INLINE JSObject* -NewObjectWithClassProto(JSContext *cx, Class *clasp, JSObject *proto, - gc::AllocKind kind) +/* + * Fill slots with the initial slot array to use for a newborn object which + * may or may not need dynamic slots. + */ +inline bool +PreallocateObjectDynamicSlots(JSContext *cx, Shape *shape, HeapValue **slots) { - JS_ASSERT(clasp->isNative()); - - types::TypeObject *type = proto->getNewType(cx); - if (!type) - return NULL; - - if (CanBeFinalizedInBackground(kind, clasp)) - kind = GetBackgroundAllocKind(kind); - - JSObject* obj = js_NewGCObject(cx, kind); - if (!obj) - return NULL; - - if (!obj->initSharingEmptyShape(cx, clasp, type, proto->getParent(), NULL, kind)) - return NULL; - return obj; -} - -/* Make an object with pregenerated shape from a NEWOBJECT bytecode. 
*/ -static inline JSObject * -CopyInitializerObject(JSContext *cx, JSObject *baseobj, types::TypeObject *type) -{ - JS_ASSERT(baseobj->getClass() == &ObjectClass); - JS_ASSERT(!baseobj->inDictionaryMode()); - - gc::AllocKind kind = gc::GetGCObjectFixedSlotsKind(baseobj->numFixedSlots()); -#ifdef JS_THREADSAFE - kind = gc::GetBackgroundAllocKind(kind); -#endif - JS_ASSERT(kind == baseobj->getAllocKind()); - JSObject *obj = NewBuiltinClassInstance(cx, &ObjectClass, kind); - - if (!obj || !obj->ensureSlots(cx, baseobj->numSlots())) - return NULL; - - obj->setType(type); - obj->flags = baseobj->flags; - obj->lastProp = baseobj->lastProp; - obj->objShape = baseobj->objShape; - - return obj; + if (size_t count = JSObject::dynamicSlotsCount(shape->numFixedSlots(), shape->slotSpan())) { + *slots = (HeapValue *) cx->malloc_(count * sizeof(HeapValue)); + if (!*slots) + return false; + Debug_SetValueRangeToCrashOnTouch(*slots, count); + return true; + } + *slots = NULL; + return true; } inline bool @@ -2086,18 +2027,52 @@ ValueIsSpecial(JSObject *obj, Value *propval, SpecialId *sidp, JSContext *cx) return false; } +JSObject * +DefineConstructorAndPrototype(JSContext *cx, JSObject *obj, JSProtoKey key, JSAtom *atom, + JSObject *protoProto, Class *clasp, + Native constructor, uintN nargs, + JSPropertySpec *ps, JSFunctionSpec *fs, + JSPropertySpec *static_ps, JSFunctionSpec *static_fs, + JSObject **ctorp = NULL, + gc::AllocKind ctorKind = JSFunction::FinalizeKind); + } /* namespace js */ +extern JSObject * +js_InitClass(JSContext *cx, JSObject *obj, JSObject *parent_proto, + js::Class *clasp, JSNative constructor, uintN nargs, + JSPropertySpec *ps, JSFunctionSpec *fs, + JSPropertySpec *static_ps, JSFunctionSpec *static_fs, + JSObject **ctorp = NULL, + js::gc::AllocKind ctorKind = JSFunction::FinalizeKind); + inline JSObject * js_GetProtoIfDenseArray(JSObject *obj) { return obj->isDenseArray() ? obj->getProto() : obj; } +/* + * js_PurgeScopeChain does nothing if obj is not itself a prototype or parent + * scope, else it reshapes the scope and prototype chains it links. It calls + * js_PurgeScopeChainHelper, which asserts that obj is flagged as a delegate + * (i.e., obj has ever been on a prototype or parent chain). 
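// [Editor's aside: illustrative sketch, not part of the patch.] NewObjectWithType()
// (declared above) is for callers that already hold a site-specific TypeObject, e.g.
// objects made by scripted 'new'. A hypothetical caller picking an alloc kind from an
// expected slot count might look like this; names here are assumptions:
static JSObject *
ExampleNewFromAllocationSite(JSContext *cx, js::types::TypeObject *type,
                             JSObject *parent, size_t expectedSlots)
{
    js::gc::AllocKind kind = js::GuessObjectGCKind(expectedSlots);
    return js::NewObjectWithType(cx, type, parent, kind);
}
// [End editor's aside.]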
+ */ +extern bool +js_PurgeScopeChainHelper(JSContext *cx, JSObject *obj, jsid id); + +inline bool +js_PurgeScopeChain(JSContext *cx, JSObject *obj, jsid id) +{ + if (obj->isDelegate()) + return js_PurgeScopeChainHelper(cx, obj, id); + return true; +} + inline void JSObject::setSlot(uintN slot, const js::Value &value) { - JS_ASSERT(slot < capacity); + JS_ASSERT(slotInRange(slot)); getSlotRef(slot).set(compartment(), value); } @@ -2105,14 +2080,14 @@ inline void JSObject::initSlot(uintN slot, const js::Value &value) { JS_ASSERT(getSlot(slot).isUndefined() || getSlot(slot).isMagic(JS_ARRAY_HOLE)); + JS_ASSERT(slotInRange(slot)); initSlotUnchecked(slot, value); } inline void JSObject::initSlotUnchecked(uintN slot, const js::Value &value) { - JS_ASSERT(slot < capacity); - getSlotRef(slot).init(value); + getSlotAddressUnchecked(slot)->init(value); } inline void @@ -2129,60 +2104,14 @@ JSObject::initFixedSlot(uintN slot, const js::Value &value) fixedSlots()[slot].init(value); } -inline void -JSObject::clearParent() -{ - parent.clear(); -} - -inline void -JSObject::setParent(JSObject *newParent) -{ -#ifdef DEBUG - for (JSObject *obj = newParent; obj; obj = obj->getParent()) - JS_ASSERT(obj != this); -#endif - setDelegateNullSafe(newParent); - parent = newParent; -} - -inline void -JSObject::initParent(JSObject *newParent) -{ - JS_ASSERT(isNewborn()); -#ifdef DEBUG - for (JSObject *obj = newParent; obj; obj = obj->getParent()) - JS_ASSERT(obj != this); -#endif - setDelegateNullSafe(newParent); - parent.init(newParent); -} - -inline void -JSObject::setPrivate(void *data) -{ - JS_ASSERT(getClass()->flags & JSCLASS_HAS_PRIVATE); - - privateWriteBarrierPre(&privateData); - privateData = data; - privateWriteBarrierPost(&privateData); -} - -inline void -JSObject::initPrivate(void *data) -{ - JS_ASSERT(getClass()->flags & JSCLASS_HAS_PRIVATE); - privateData = data; -} - inline void JSObject::privateWriteBarrierPre(void **old) { #ifdef JSGC_INCREMENTAL JSCompartment *comp = compartment(); if (comp->needsBarrier()) { - if (clasp->trace && *old) - clasp->trace(comp->barrierTracer(), this); + if (*old && getClass()->trace) + getClass()->trace(comp->barrierTracer(), this); } #endif } diff --git a/js/src/json.cpp b/js/src/json.cpp index 8def9d439f6d..a3e5d34af9d8 100644 --- a/js/src/json.cpp +++ b/js/src/json.cpp @@ -930,7 +930,7 @@ static JSFunctionSpec json_static_methods[] = { JSObject * js_InitJSONClass(JSContext *cx, JSObject *obj) { - JSObject *JSON = NewNonFunction(cx, &JSONClass, NULL, obj); + JSObject *JSON = NewObjectWithClassProto(cx, &JSONClass, NULL, obj); if (!JSON || !JSON->setSingletonType(cx)) return NULL; diff --git a/js/src/jsopcode.cpp b/js/src/jsopcode.cpp index 7f6386c74931..4ac7b69123f7 100644 --- a/js/src/jsopcode.cpp +++ b/js/src/jsopcode.cpp @@ -537,12 +537,12 @@ ToDisassemblySource(JSContext *cx, jsval v, JSAutoByteString *bytes) while (!r.empty()) { const Shape &shape = r.front(); JSAutoByteString bytes; - if (!js_AtomToPrintableString(cx, JSID_TO_ATOM(shape.propid), &bytes)) + if (!js_AtomToPrintableString(cx, JSID_TO_ATOM(shape.propid()), &bytes)) return false; r.popFront(); source = JS_sprintf_append(source, "%s: %d%s", - bytes.ptr(), shape.shortid, + bytes.ptr(), shape.shortid(), !r.empty() ? 
", " : ""); if (!source) return false; @@ -556,8 +556,7 @@ ToDisassemblySource(JSContext *cx, jsval v, JSAutoByteString *bytes) } if (clasp == &FunctionClass) { - JSFunction *fun = obj->getFunctionPrivate(); - JSString *str = JS_DecompileFunction(cx, fun, JS_DONT_PRETTY_PRINT); + JSString *str = JS_DecompileFunction(cx, obj->toFunction(), JS_DONT_PRETTY_PRINT); if (!str) return false; return bytes->encode(cx, str); @@ -1546,11 +1545,33 @@ GetArgOrVarAtom(JSPrinter *jp, uintN slot) return name; } +#define LOCAL_ASSERT(expr) LOCAL_ASSERT_RV(expr, "") + +static const char * +GetLocalInSlot(SprintStack *ss, jsint i, jsint slot, JSObject *obj) +{ + for (Shape::Range r(obj->lastProperty()); !r.empty(); r.popFront()) { + const Shape &shape = r.front(); + + if (shape.shortid() == slot) { + LOCAL_ASSERT(JSID_IS_ATOM(shape.propid())); + + JSAtom *atom = JSID_TO_ATOM(shape.propid()); + const char *rval = QuoteString(&ss->sprinter, atom, 0); + if (!rval) + return NULL; + + RETRACT(&ss->sprinter, rval); + return rval; + } + } + + return GetStr(ss, i); +} + const char * GetLocal(SprintStack *ss, jsint i) { -#define LOCAL_ASSERT(expr) LOCAL_ASSERT_RV(expr, "") - ptrdiff_t off = ss->offsets[i]; if (off >= 0) return OFF2STR(&ss->sprinter, off); @@ -1569,40 +1590,47 @@ GetLocal(SprintStack *ss, jsint i) if (!JSScript::isValidOffset(script->objectsOffset)) return GetStr(ss, i); - for (jsatomid j = 0, n = script->objects()->length; j != n; j++) { - JSObject *obj = script->getObject(j); - if (obj->isBlock()) { - jsint depth = OBJ_BLOCK_DEPTH(cx, obj); - jsint count = OBJ_BLOCK_COUNT(cx, obj); + // In case of a let variable, the stack points to a JSOP_ENTERBLOCK opcode. + // Get the object number from the block instead of iterating all objects and + // hoping the right object is found. + if (off <= -2 && ss->printer->pcstack) { + jsbytecode *pc = ss->printer->pcstack[-2 - off]; - if (jsuint(i - depth) < jsuint(count)) { - jsint slot = i - depth; + JS_ASSERT(ss->printer->script->code <= pc); + JS_ASSERT(pc < (ss->printer->script->code + ss->printer->script->length)); - for (Shape::Range r(obj->lastProperty()); !r.empty(); r.popFront()) { - const Shape &shape = r.front(); + if (JSOP_ENTERBLOCK == (JSOp)*pc) { + jsatomid j = js_GetIndexFromBytecode(ss->sprinter.context, + ss->printer->script, pc, 0); + JSObject *obj = script->getObject(j); - if (shape.shortid == slot) { - LOCAL_ASSERT(JSID_IS_ATOM(shape.propid)); + if (obj->isBlock()) { + jsint depth = OBJ_BLOCK_DEPTH(cx, obj); + jsint count = OBJ_BLOCK_COUNT(cx, obj); - JSAtom *atom = JSID_TO_ATOM(shape.propid); - const char *rval = QuoteString(&ss->sprinter, atom, 0); - if (!rval) - return NULL; - - RETRACT(&ss->sprinter, rval); - return rval; - } - } - - break; + if (jsuint(i - depth) < jsuint(count)) + return GetLocalInSlot(ss, i, jsint(i - depth), obj); } } } + // Iterate over all objects. 
+ for (jsatomid j = 0, n = script->objects()->length; j != n; j++) { + JSObject *obj = script->getObject(j); + + if (obj->isBlock()) { + jsint depth = OBJ_BLOCK_DEPTH(cx, obj); + jsint count = OBJ_BLOCK_COUNT(cx, obj); + + if (jsuint(i - depth) < jsuint(count)) + return GetLocalInSlot(ss, i, jsint(i - depth), obj); + } + } + return GetStr(ss, i); +} #undef LOCAL_ASSERT -} static JSBool IsVarSlot(JSPrinter *jp, jsbytecode *pc, jsint *indexp) @@ -2020,8 +2048,8 @@ GetBlockNames(JSContext *cx, JSObject *blockObj, AtomVector *atoms) const Shape &shape = r.front(); LOCAL_ASSERT(shape.hasShortID()); --i; - LOCAL_ASSERT((uintN)shape.shortid == i); - (*atoms)[i] = JSID_TO_ATOM(shape.propid); + LOCAL_ASSERT((uintN)shape.shortid() == i); + (*atoms)[i] = JSID_TO_ATOM(shape.propid()); } LOCAL_ASSERT(i == 0); @@ -2291,7 +2319,7 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb) uint32 format = cs->format; if (((fp && pc == fp->pcQuadratic(cx)) || (pc == startpc && nuses != 0)) && - format & (JOF_SET|JOF_DEL|JOF_INCDEC|JOF_FOR|JOF_VARPROP)) { + format & (JOF_SET|JOF_DEL|JOF_INCDEC|JOF_VARPROP)) { uint32 mode = JOF_MODE(format); if (mode == JOF_NAME) { /* diff --git a/js/src/jsopcode.h b/js/src/jsopcode.h index 0d3b3cdc668c..bd2f8e4caa9b 100644 --- a/js/src/jsopcode.h +++ b/js/src/jsopcode.h @@ -109,7 +109,6 @@ typedef enum JSOp { #define JOF_INC (2U<<10) /* increment (++, not --) opcode */ #define JOF_INCDEC (3U<<10) /* increment or decrement opcode */ #define JOF_POST (1U<<12) /* postorder increment or decrement */ -#define JOF_FOR (1U<<13) /* for-in property op (akin to JOF_SET) */ #define JOF_ASSIGNING JOF_SET /* hint for Class.resolve, used for ops that do simplex assignment */ #define JOF_DETECTING (1U<<14) /* object detection for JSNewResolveOp */ diff --git a/js/src/jsopcode.tbl b/js/src/jsopcode.tbl index 5c91db39a89f..cd1467631eb8 100644 --- a/js/src/jsopcode.tbl +++ b/js/src/jsopcode.tbl @@ -578,10 +578,8 @@ OPDEF(JSOP_OBJTOP, 222,"objtop", NULL, 3, 0, 0, 0, JOF_UINT16 */ OPDEF(JSOP_SETMETHOD, 223,"setmethod", NULL, 3, 2, 1, 3, JOF_ATOM|JOF_PROP|JOF_SET|JOF_DETECTING) OPDEF(JSOP_INITMETHOD, 224,"initmethod", NULL, 3, 2, 1, 3, JOF_ATOM|JOF_PROP|JOF_SET|JOF_DETECTING) -OPDEF(JSOP_UNBRAND, 225,"unbrand", NULL, 1, 1, 1, 0, JOF_BYTE) -OPDEF(JSOP_UNBRANDTHIS, 226,"unbrandthis", NULL, 1, 0, 0, 0, JOF_BYTE) -OPDEF(JSOP_SHARPINIT, 227,"sharpinit", NULL, 3, 0, 0, 0, JOF_UINT16|JOF_SHARPSLOT) +OPDEF(JSOP_SHARPINIT, 225,"sharpinit", NULL, 3, 0, 0, 0, JOF_UINT16|JOF_SHARPSLOT) /* Pop the stack, convert to a jsid (int or string), and push back. */ -OPDEF(JSOP_TOID, 228, "toid", NULL, 1, 1, 1, 0, JOF_BYTE) +OPDEF(JSOP_TOID, 226, "toid", NULL, 1, 1, 1, 0, JOF_BYTE) diff --git a/js/src/jsprobes.h b/js/src/jsprobes.h index e0ea4cf6a859..ec2f484034c6 100644 --- a/js/src/jsprobes.h +++ b/js/src/jsprobes.h @@ -137,6 +137,9 @@ bool resizeHeap(JSCompartment *compartment, size_t oldSize, size_t newSize); */ bool createObject(JSContext *cx, JSObject *obj); +/* Resize events are being tracked. 
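// [Editor's aside: illustrative sketch, not part of the patch.] objectResizeActive()
// (declared just below) lets resize paths skip size bookkeeping entirely unless a
// profiler such as ETW is listening. A hypothetical reporting helper, assuming
// oldSize was sampled via obj->slotsAndStructSize() before the reallocation:
static void ExampleReportObjectResize(JSContext *cx, JSObject *obj, size_t oldSize)
{
    if (js::Probes::objectResizeActive())
        js::Probes::resizeObject(cx, obj, oldSize, obj->slotsAndStructSize());
}
// [End editor's aside.]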
*/ +bool objectResizeActive(); + /* Object has been resized */ bool resizeObject(JSContext *cx, JSObject *obj, size_t oldSize, size_t newSize); @@ -489,6 +492,17 @@ Probes::finalizeObject(JSObject *obj) return ok; } +inline bool +Probes::objectResizeActive() +{ +#ifdef MOZ_ETW + if (ProfilingActive) + return true; +#endif + + return false; +} + inline bool Probes::resizeObject(JSContext *cx, JSObject *obj, size_t oldSize, size_t newSize) { diff --git a/js/src/jspropertycache.cpp b/js/src/jspropertycache.cpp index 68d15d987630..1a77d41a230a 100644 --- a/js/src/jspropertycache.cpp +++ b/js/src/jspropertycache.cpp @@ -46,26 +46,17 @@ using namespace js; -JS_STATIC_ASSERT(sizeof(PCVal) == sizeof(jsuword)); - JS_REQUIRES_STACK PropertyCacheEntry * PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, JSObject *pobj, - const Shape *shape, JSBool adding) + const Shape *shape) { - jsuword kshape, vshape; JSOp op; const JSCodeSpec *cs; - PCVal vword; PropertyCacheEntry *entry; JS_ASSERT(this == &JS_PROPERTY_CACHE(cx)); JS_ASSERT(!cx->runtime->gcRunning); - if (js_IsPropertyCacheDisabled(cx)) { - PCMETER(disfills++); - return JS_NO_PROP_CACHE_FILL; - } - /* * Check for fill from js_SetPropertyHelper where the setter removed shape * from pobj (via unwatch or delete, e.g.). @@ -75,15 +66,6 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, JSObject *po return JS_NO_PROP_CACHE_FILL; } - /* - * Dictionary-mode objects have unique shapes, so there is no way to cache - * a prediction of the next shape when adding. - */ - if (adding && obj->inDictionaryMode()) { - PCMETER(add2dictfills++); - return JS_NO_PROP_CACHE_FILL; - } - /* * Check for overdeep scope and prototype chain. Because resolve, getter, * and setter hooks can change the prototype chain using JS_SetPrototype @@ -93,16 +75,26 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, JSObject *po * The scopeIndex can't be wrong. We require JS_SetParent calls to happen * before any running script might consult a parent-linked scope chain. If * this requirement is not satisfied, the fill in progress will never hit, - * but vcap vs. scope shape tests ensure nothing malfunctions. + * but scope shape tests ensure nothing malfunctions. */ JS_ASSERT_IF(obj == pobj, scopeIndex == 0); JSObject *tmp = obj; for (uintN i = 0; i != scopeIndex; i++) - tmp = tmp->getParent(); + tmp = tmp->internalScopeChain(); uintN protoIndex = 0; while (tmp != pobj) { + + /* + * Don't cache entries across prototype lookups which can mutate in + * arbitrary ways without a shape change. + */ + if (tmp->hasUncacheableProto()) { + PCMETER(noprotos++); + return JS_NO_PROP_CACHE_FILL; + } + tmp = tmp->getProto(); /* @@ -117,7 +109,7 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, JSObject *po ++protoIndex; } - if (scopeIndex > PCVCAP_SCOPEMASK || protoIndex > PCVCAP_PROTOMASK) { + if (scopeIndex > PCINDEX_SCOPEMASK || protoIndex > PCINDEX_PROTOMASK) { PCMETER(longchains++); return JS_NO_PROP_CACHE_FILL; } @@ -130,133 +122,9 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, JSObject *po JSScript *script = cx->stack.currentScript(&pc); op = js_GetOpcode(cx, script, pc); cs = &js_CodeSpec[op]; - kshape = 0; - do { - /* - * Check for a prototype "plain old method" callee computation. What - * is a plain old method? It's a function-valued property with stub - * getter, so get of a function is idempotent. 
- */ - if (cs->format & JOF_CALLOP) { - if (shape->isMethod()) { - /* - * A compiler-created function object, AKA a method, already - * memoized in the property tree. - */ - JS_ASSERT(pobj->hasMethodBarrier()); - JSObject &funobj = shape->methodObject(); - JS_ASSERT(funobj == pobj->nativeGetSlot(shape->slot).toObject()); - vword.setFunObj(funobj); - break; - } - - /* - * N.B. Objects are not branded if type inference is enabled, to - * allow property accesses without shape checks in JIT code. - */ - if (!pobj->generic() && shape->hasDefaultGetter() && pobj->containsSlot(shape->slot) && - !cx->typeInferenceEnabled()) { - const Value &v = pobj->nativeGetSlot(shape->slot); - JSObject *funobj; - - if (IsFunctionObject(v, &funobj)) { - /* - * Great, we have a function-valued prototype property - * where the getter is JS_PropertyStub. The type id in - * pobj does not evolve with changes to property values, - * however. - * - * So here, on first cache fill for this method, we brand - * obj with a new shape and set the JSObject::BRANDED flag. - * Once this flag is set, any property assignment that - * changes the value from or to a different function object - * will result in shape being regenerated. - */ - if (!pobj->branded()) { - PCMETER(brandfills++); -#ifdef DEBUG_notme - JSFunction *fun = JSVAL_TO_OBJECT(v)->getFunctionPrivate(); - JSAutoByteString funNameBytes; - if (const char *funName = GetFunctionNameBytes(cx, fun, &funNameBytes)) { - fprintf(stderr, - "branding %p (%s) for funobj %p (%s), shape %lu\n", - pobj, pobj->getClass()->name, JSVAL_TO_OBJECT(v), funName, - obj->shape()); - } -#endif - if (!pobj->brand(cx)) - return JS_NO_PROP_CACHE_FILL; - } - vword.setFunObj(*funobj); - break; - } - } - } else if ((cs->format & (JOF_SET | JOF_FOR | JOF_INCDEC)) && obj->watched()) { - return JS_NO_PROP_CACHE_FILL; - } - - /* - * If getting a value via a stub getter, or doing an INCDEC op - * with stub getters and setters, we can cache the slot. - */ - if (!(cs->format & (JOF_SET | JOF_FOR)) && - (!(cs->format & JOF_INCDEC) || (shape->hasDefaultSetter() && shape->writable())) && - shape->hasDefaultGetter() && - pobj->containsSlot(shape->slot)) { - /* Great, let's cache shape's slot and use it on cache hit. */ - vword.setSlot(shape->slot); - } else { - /* Best we can do is to cache shape (still a nice speedup). */ - vword.setShape(shape); - if (adding && - pobj->shape() == shape->shapeid) { - /* - * Our caller added a new property. We also know that a setter - * that js_NativeSet might have run has not mutated pobj, so - * the added property is still the last one added, and pobj is - * not branded. - * - * We want to cache under pobj's shape before the property - * addition to bias for the case when the mutator opcode - * always adds the same property. This allows us to optimize - * periodic execution of object initializers or other explicit - * initialization sequences such as - * - * obj = {}; obj.x = 1; obj.y = 2; - * - * We assume that on average the win from this optimization is - * greater than the cost of an extra mismatch per loop owing to - * the bias for the following case: - * - * obj = {}; ... for (...) { ... obj.x = ... } - * - * On the first iteration of such a for loop, JSOP_SETPROP - * fills the cache with the shape of the newly created object - * obj, not the shape of obj after obj.x has been assigned. - * That mismatches obj's shape on the second iteration. Note - * that on the third and subsequent iterations the cache will - * be hit because the shape is no longer updated. 
- */ - JS_ASSERT(shape == pobj->lastProperty()); - JS_ASSERT(!pobj->nativeEmpty()); - - kshape = shape->previous()->shapeid; - - /* - * When adding we predict no prototype object will later gain a - * readonly property or setter. - */ - vshape = cx->runtime->protoHazardShape; - } - } - } while (0); - - if (kshape == 0) { - kshape = obj->shape(); - vshape = pobj->shape(); - } - JS_ASSERT(kshape < SHAPE_OVERFLOW_BIT); + if ((cs->format & JOF_SET) && obj->watched()) + return JS_NO_PROP_CACHE_FILL; if (obj == pobj) { JS_ASSERT(scopeIndex == 0 && protoIndex == 0); @@ -272,20 +140,15 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, JSObject *po /* * Make sure that a later shadowing assignment will enter * PurgeProtoChain and invalidate this entry, bug 479198. - * - * This is not thread-safe but we are about to make all objects - * except multi-threaded wrappers (bug 566951) single-threaded. - * And multi-threaded wrappers are non-native Proxy instances, so - * they won't use the property cache. */ - obj->setDelegate(); + if (!obj->isDelegate()) + return JS_NO_PROP_CACHE_FILL; } } - JS_ASSERT(vshape < SHAPE_OVERFLOW_BIT); - entry = &table[hash(pc, kshape)]; + entry = &table[hash(pc, obj->lastProperty())]; PCMETER(entry->vword.isNull() || recycles++); - entry->assign(pc, kshape, vshape, scopeIndex, protoIndex, vword); + entry->assign(pc, obj->lastProperty(), pobj->lastProperty(), shape, scopeIndex, protoIndex); empty = false; PCMETER(fills++); @@ -322,8 +185,6 @@ PropertyCache::fullTest(JSContext *cx, jsbytecode *pc, JSObject **objp, JSObject PropertyCacheEntry *entry) { JSObject *obj, *pobj, *tmp; - uint32 vcap; - JSScript *script = cx->stack.currentScript(); JS_ASSERT(this == &JS_PROPERTY_CACHE(cx)); @@ -333,25 +194,24 @@ PropertyCache::fullTest(JSContext *cx, jsbytecode *pc, JSObject **objp, JSObject const JSCodeSpec &cs = js_CodeSpec[op]; obj = *objp; - vcap = entry->vcap; + uint32 vindex = entry->vindex; if (entry->kpc != pc) { PCMETER(kpcmisses++); JSAtom *atom = GetAtomFromBytecode(cx, pc, op, cs); #ifdef DEBUG_notme - JSScript *script = cx->fp()->getScript(); JSAutoByteString printable; fprintf(stderr, "id miss for %s from %s:%u" - " (pc %u, kpc %u, kshape %u, shape %u)\n", + " (pc %u, kpc %u, kshape %p, shape %p)\n", js_AtomToPrintableString(cx, atom, &printable), script->filename, js_PCToLineNumber(cx, script, pc), pc - script->code, entry->kpc - script->code, entry->kshape, - obj->shape()); + obj->lastProperty()); js_Disassemble1(cx, script, pc, pc - script->code, JS_FALSE, stderr); @@ -360,39 +220,38 @@ PropertyCache::fullTest(JSContext *cx, jsbytecode *pc, JSObject **objp, JSObject return atom; } - if (entry->kshape != obj->shape()) { + if (entry->kshape != obj->lastProperty()) { PCMETER(kshapemisses++); return GetAtomFromBytecode(cx, pc, op, cs); } /* * PropertyCache::test handles only the direct and immediate-prototype hit - * cases. All others go here. We could embed the target object in the cache - * entry but then entry size would be 5 words. Instead we traverse chains. + * cases. All others go here. 
*/ pobj = obj; if (JOF_MODE(cs.format) == JOF_NAME) { - while (vcap & (PCVCAP_SCOPEMASK << PCVCAP_PROTOBITS)) { - tmp = pobj->getParent(); + while (vindex & (PCINDEX_SCOPEMASK << PCINDEX_PROTOBITS)) { + tmp = pobj->scopeChain(); if (!tmp || !tmp->isNative()) break; pobj = tmp; - vcap -= PCVCAP_PROTOSIZE; + vindex -= PCINDEX_PROTOSIZE; } *objp = pobj; } - while (vcap & PCVCAP_PROTOMASK) { + while (vindex & PCINDEX_PROTOMASK) { tmp = pobj->getProto(); if (!tmp || !tmp->isNative()) break; pobj = tmp; - --vcap; + --vindex; } - if (matchShape(cx, pobj, vcap >> PCVCAP_TAGBITS)) { + if (pobj->lastProperty() == entry->pshape) { #ifdef DEBUG JSAtom *atom = GetAtomFromBytecode(cx, pc, op, cs); jsid id = ATOM_TO_JSID(atom); @@ -416,8 +275,9 @@ PropertyCache::assertEmpty() for (uintN i = 0; i < SIZE; i++) { JS_ASSERT(!table[i].kpc); JS_ASSERT(!table[i].kshape); - JS_ASSERT(!table[i].vcap); - JS_ASSERT(table[i].vword.isNull()); + JS_ASSERT(!table[i].pshape); + JS_ASSERT(!table[i].prop); + JS_ASSERT(!table[i].vindex); } } #endif @@ -431,7 +291,6 @@ PropertyCache::purge(JSContext *cx) } PodArrayZero(table); - JS_ASSERT(table[0].vword.isNull()); empty = true; #ifdef JS_PROPERTY_CACHE_METERING @@ -491,22 +350,6 @@ PropertyCache::purge(JSContext *cx) PCMETER(flushes++); } -void -PropertyCache::purgeForScript(JSContext *cx, JSScript *script) -{ - JS_ASSERT(!cx->runtime->gcRunning); - - for (PropertyCacheEntry *entry = table; entry < table + SIZE; entry++) { - if (UnsignedPtrDiff(entry->kpc, script->code) < script->length) { - entry->kpc = NULL; -#ifdef DEBUG - entry->kshape = entry->vcap = 0; - entry->vword.setNull(); -#endif - } - } -} - void PropertyCache::restore(PropertyCacheEntry *entry) { diff --git a/js/src/jspropertycache.h b/js/src/jspropertycache.h index 1874b5e1a10f..a9d545bd7d16 100644 --- a/js/src/jspropertycache.h +++ b/js/src/jspropertycache.h @@ -53,97 +53,38 @@ namespace js { * . */ -/* Property cache value capabilities. */ +/* Indexing for property cache entry scope and prototype chain walking. */ enum { - PCVCAP_PROTOBITS = 4, - PCVCAP_PROTOSIZE = JS_BIT(PCVCAP_PROTOBITS), - PCVCAP_PROTOMASK = JS_BITMASK(PCVCAP_PROTOBITS), + PCINDEX_PROTOBITS = 4, + PCINDEX_PROTOSIZE = JS_BIT(PCINDEX_PROTOBITS), + PCINDEX_PROTOMASK = JS_BITMASK(PCINDEX_PROTOBITS), - PCVCAP_SCOPEBITS = 4, - PCVCAP_SCOPESIZE = JS_BIT(PCVCAP_SCOPEBITS), - PCVCAP_SCOPEMASK = JS_BITMASK(PCVCAP_SCOPEBITS), - - PCVCAP_TAGBITS = PCVCAP_PROTOBITS + PCVCAP_SCOPEBITS, - PCVCAP_TAGMASK = JS_BITMASK(PCVCAP_TAGBITS) -}; - -const uint32 SHAPE_OVERFLOW_BIT = JS_BIT(32 - PCVCAP_TAGBITS); - -/* - * Property cache value. This is simply a tagged union: - * PCVal = (JSObject * | uint32 | js::Shape *). - * It is the type of PropertyCacheEntry::vword and combines with the tag bits - * of PropertyCacheEntry::vcap to tell how to get or set the property, once a - * property cache hit is validated. - * - * PropertyCache::purge depends on the bit-pattern of a null PCVal being 0. 
- */ -class PCVal -{ - private: - enum { - OBJECT = 0, - SLOT = 1, - SHAPE = 2, - TAG = 3 - }; - - jsuword v; - - public: - bool isNull() const { return v == 0; } - void setNull() { v = 0; } - - bool isFunObj() const { return (v & TAG) == OBJECT; } - JSObject &toFunObj() const { - JS_ASSERT(isFunObj()); - return *reinterpret_cast(v); - } - void setFunObj(JSObject &obj) { - v = reinterpret_cast(&obj); - } - - bool isSlot() const { return v & SLOT; } - uint32 toSlot() const { JS_ASSERT(isSlot()); return uint32(v) >> 1; } - void setSlot(uint32 slot) { v = (jsuword(slot) << 1) | SLOT; } - - bool isShape() const { return (v & TAG) == SHAPE; } - const js::Shape *toShape() const { - JS_ASSERT(isShape()); - return reinterpret_cast(v & ~TAG); - } - void setShape(const js::Shape *shape) { - JS_ASSERT(shape); - v = reinterpret_cast(shape) | SHAPE; - } + PCINDEX_SCOPEBITS = 4, + PCINDEX_SCOPESIZE = JS_BIT(PCINDEX_SCOPEBITS), + PCINDEX_SCOPEMASK = JS_BITMASK(PCINDEX_SCOPEBITS) }; struct PropertyCacheEntry { jsbytecode *kpc; /* pc of cache-testing bytecode */ - jsuword kshape; /* shape of direct (key) object */ - jsuword vcap; /* value capability, see above */ - PCVal vword; /* value word, see PCVal above */ + const Shape *kshape; /* shape of direct (key) object */ + const Shape *pshape; /* shape of owning object */ + const Shape *prop; /* shape of accessed property */ + uint16 vindex; /* scope/proto chain indexing, + * see PCINDEX above */ - bool adding() const { return vcapTag() == 0 && kshape != vshape(); } - bool directHit() const { return vcapTag() == 0 && kshape == vshape(); } + bool directHit() const { return vindex == 0; } - jsuword vcapTag() const { return vcap & PCVCAP_TAGMASK; } - uint32 vshape() const { return uint32(vcap >> PCVCAP_TAGBITS); } - jsuword scopeIndex() const { return (vcap >> PCVCAP_PROTOBITS) & PCVCAP_SCOPEMASK; } - jsuword protoIndex() const { return vcap & PCVCAP_PROTOMASK; } - - void assign(jsbytecode *kpc, jsuword kshape, jsuword vshape, - uintN scopeIndex, uintN protoIndex, PCVal vword) { - JS_ASSERT(kshape < SHAPE_OVERFLOW_BIT); - JS_ASSERT(vshape < SHAPE_OVERFLOW_BIT); - JS_ASSERT(scopeIndex <= PCVCAP_SCOPEMASK); - JS_ASSERT(protoIndex <= PCVCAP_PROTOMASK); + void assign(jsbytecode *kpc, const Shape *kshape, const Shape *pshape, + const Shape *prop, uintN scopeIndex, uintN protoIndex) { + JS_ASSERT(scopeIndex <= PCINDEX_SCOPEMASK); + JS_ASSERT(protoIndex <= PCINDEX_PROTOMASK); this->kpc = kpc; this->kshape = kshape; - this->vcap = (vshape << PCVCAP_TAGBITS) | (scopeIndex << PCVCAP_PROTOBITS) | protoIndex; - this->vword = vword; + this->pshape = pshape; + this->prop = prop; + this->vindex = (scopeIndex << PCINDEX_PROTOBITS) | protoIndex; } }; @@ -212,15 +153,10 @@ class PropertyCache } private: - /* - * Add kshape rather than xor it to avoid collisions between nearby bytecode - * that are evolving an object by setting successive properties, incrementing - * the object's shape on each set. - */ static inline jsuword - hash(jsbytecode *pc, jsuword kshape) + hash(jsbytecode *pc, const Shape *kshape) { - return ((((jsuword(pc) >> SIZE_LOG2) ^ jsuword(pc)) + kshape) & MASK); + return (((jsuword(pc) >> SIZE_LOG2) ^ jsuword(pc) ^ ((jsuword)kshape >> 3)) & MASK); } static inline bool matchShape(JSContext *cx, JSObject *obj, uint32 shape); @@ -250,18 +186,6 @@ class PropertyCache PropertyCacheEntry **entryp, JSObject **obj2p, JSAtom **atomp); - /* - * Test for cached information about creating a new own data property on obj at pc. 
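// [Editor's aside: illustrative sketch, not part of the patch.] vindex packs the two
// chain-walk counts written by PropertyCacheEntry::assign() into one 16-bit field:
// the proto-chain hop count in the low 4 bits and the scope-chain hop count in the
// next 4 bits. Decoding is the inverse of assign()'s packing:
static void ExampleDecodeVindex(uint16 vindex, uintN *scopeHops, uintN *protoHops)
{
    *scopeHops = (vindex >> js::PCINDEX_PROTOBITS) & js::PCINDEX_SCOPEMASK;
    *protoHops = vindex & js::PCINDEX_PROTOMASK;
}
// [End editor's aside.]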
- * - * On a hit, set *shapep to an shape from the property tree describing the - * new property as well as all existing properties on obj and return - * true. Otherwise return false. - * - * Hit or miss, *entryp receives a pointer to the property cache entry. - */ - JS_ALWAYS_INLINE bool testForInit(JSRuntime *rt, jsbytecode *pc, JSObject *obj, - const js::Shape **shapep, PropertyCacheEntry **entryp); - /* * Fill property cache entry for key cx->fp->pc, optimized value word * computed from obj and shape, and entry capability forged from 24-bit @@ -271,11 +195,9 @@ class PropertyCache * not possible. */ JS_REQUIRES_STACK PropertyCacheEntry *fill(JSContext *cx, JSObject *obj, uintN scopeIndex, - JSObject *pobj, const js::Shape *shape, - JSBool adding = false); + JSObject *pobj, const js::Shape *shape); void purge(JSContext *cx); - void purgeForScript(JSContext *cx, JSScript *script); /* Restore an entry that may have been purged during a GC. */ void restore(PropertyCacheEntry *entry); diff --git a/js/src/jspropertycacheinlines.h b/js/src/jspropertycacheinlines.h index 71b400333f8a..fc15d1adecd3 100644 --- a/js/src/jspropertycacheinlines.h +++ b/js/src/jspropertycacheinlines.h @@ -48,12 +48,6 @@ using namespace js; -/* static */ inline bool -PropertyCache::matchShape(JSContext *cx, JSObject *obj, uint32 shape) -{ - return obj->shape() == shape; -} - /* * This method is designed to inline the fast path in js_Interpret, so it makes * "just-so" restrictions on parameters, e.g. pobj and obj should not be the @@ -75,23 +69,22 @@ PropertyCache::test(JSContext *cx, jsbytecode *pc, JSObject *&obj, { JS_ASSERT(this == &JS_PROPERTY_CACHE(cx)); - uint32 kshape = obj->shape(); + const Shape *kshape = obj->lastProperty(); entry = &table[hash(pc, kshape)]; PCMETER(pctestentry = entry); PCMETER(tests++); JS_ASSERT(&obj != &pobj); - JS_ASSERT(entry->kshape < SHAPE_OVERFLOW_BIT); if (entry->kpc == pc && entry->kshape == kshape) { JSObject *tmp; pobj = obj; - if (entry->vcapTag() == 1 && + if (entry->vindex == 1 && (tmp = pobj->getProto()) != NULL) { pobj = tmp; } - if (matchShape(cx, pobj, entry->vshape())) { + if (pobj->lastProperty() == entry->pshape) { PCMETER(pchits++); - PCMETER(!entry->vcapTag() || protopchits++); + PCMETER(!entry->vindex || protopchits++); atom = NULL; return; } @@ -105,14 +98,15 @@ JS_ALWAYS_INLINE bool PropertyCache::testForSet(JSContext *cx, jsbytecode *pc, JSObject *obj, PropertyCacheEntry **entryp, JSObject **obj2p, JSAtom **atomp) { - uint32 shape = obj->shape(); - PropertyCacheEntry *entry = &table[hash(pc, shape)]; + JS_ASSERT(this == &JS_PROPERTY_CACHE(cx)); + + const Shape *kshape = obj->lastProperty(); + PropertyCacheEntry *entry = &table[hash(pc, kshape)]; *entryp = entry; PCMETER(pctestentry = entry); PCMETER(tests++); PCMETER(settests++); - JS_ASSERT(entry->kshape < SHAPE_OVERFLOW_BIT); - if (entry->kpc == pc && entry->kshape == shape) + if (entry->kpc == pc && entry->kshape == kshape) return true; JSAtom *atom = fullTest(cx, pc, &obj, obj2p, entry); @@ -125,34 +119,4 @@ PropertyCache::testForSet(JSContext *cx, jsbytecode *pc, JSObject *obj, return false; } -JS_ALWAYS_INLINE bool -PropertyCache::testForInit(JSRuntime *rt, jsbytecode *pc, JSObject *obj, - const js::Shape **shapep, PropertyCacheEntry **entryp) -{ - JS_ASSERT(obj->slotSpan() >= JSSLOT_FREE(obj->getClass())); - uint32 kshape = obj->shape(); - PropertyCacheEntry *entry = &table[hash(pc, kshape)]; - *entryp = entry; - PCMETER(pctestentry = entry); - PCMETER(tests++); - PCMETER(initests++); - 
JS_ASSERT(entry->kshape < SHAPE_OVERFLOW_BIT); - - if (entry->kpc == pc && - entry->kshape == kshape && - entry->vshape() == rt->protoHazardShape) { - // If obj is not extensible, we cannot have a cache hit. This happens - // for sharp-variable expressions like (#1={x: Object.seal(#1#)}). - JS_ASSERT(obj->isExtensible()); - - PCMETER(pchits++); - PCMETER(inipchits++); - JS_ASSERT(entry->vcapTag() == 0); - *shapep = entry->vword.toShape(); - JS_ASSERT((*shapep)->writable()); - return true; - } - return false; -} - #endif /* jspropertycacheinlines_h___ */ diff --git a/js/src/jspropertytree.cpp b/js/src/jspropertytree.cpp index 14bb3f8cd6e6..f67784151d28 100644 --- a/js/src/jspropertytree.cpp +++ b/js/src/jspropertytree.cpp @@ -61,7 +61,7 @@ ShapeHasher::hash(const Lookup l) inline bool ShapeHasher::match(const Key k, const Lookup l) { - return l->matches(k); + return k->matches(l); } Shape * @@ -95,8 +95,6 @@ PropertyTree::insertChild(JSContext *cx, Shape *parent, Shape *child) JS_ASSERT(!parent->inDictionary()); JS_ASSERT(!child->parent); JS_ASSERT(!child->inDictionary()); - JS_ASSERT(!JSID_IS_VOID(parent->propid)); - JS_ASSERT(!JSID_IS_VOID(child->propid)); JS_ASSERT(cx->compartment == compartment); JS_ASSERT(child->compartment() == parent->compartment()); @@ -136,7 +134,6 @@ void Shape::removeChild(Shape *child) { JS_ASSERT(!child->inDictionary()); - JS_ASSERT(!JSID_IS_VOID(propid)); KidsPointer *kidp = &kids; @@ -179,7 +176,6 @@ PropertyTree::getChild(JSContext *cx, Shape *parent, const Shape &child) Shape *shape; JS_ASSERT(parent); - JS_ASSERT(!JSID_IS_VOID(parent->propid)); /* * The property tree has extremely low fan-out below its root in @@ -206,8 +202,10 @@ PropertyTree::getChild(JSContext *cx, Shape *parent, const Shape &child) if (!shape) return NULL; - new (shape) Shape(child.propid, child.getter(), child.setter(), child.slot, child.attrs, - child.flags, child.shortid, js_GenerateShape(cx)); + UnownedBaseShape *base = child.base()->unowned(); + + new (shape) Shape(&child); + shape->base_.init(base); if (!insertChild(cx, parent, shape)) return NULL; @@ -216,7 +214,7 @@ PropertyTree::getChild(JSContext *cx, Shape *parent, const Shape &child) } void -Shape::finalize(JSContext *cx) +Shape::finalize(JSContext *cx, bool background) { if (!inDictionary()) { if (parent && parent->isMarked()) @@ -225,8 +223,6 @@ Shape::finalize(JSContext *cx) if (kids.isHash()) cx->delete_(kids.toHash()); } - - freeTable(cx); } #ifdef DEBUG @@ -247,6 +243,8 @@ KidsPointer::checkConsistency(const Shape *aKid) const void Shape::dump(JSContext *cx, FILE *fp) const { + jsid propid = this->propid(); + JS_ASSERT(!JSID_IS_VOID(propid)); if (JSID_IS_INT(propid)) { @@ -269,10 +267,11 @@ Shape::dump(JSContext *cx, FILE *fp) const FileEscapedString(fp, str, '"'); } - fprintf(fp, " g/s %p/%p slot %u attrs %x ", - JS_FUNC_TO_DATA_PTR(void *, getter()), - JS_FUNC_TO_DATA_PTR(void *, setter()), - slot, attrs); + fprintf(fp, " g/s %p/%p slot %d attrs %x ", + JS_FUNC_TO_DATA_PTR(void *, base()->rawGetter), + JS_FUNC_TO_DATA_PTR(void *, base()->rawSetter), + hasSlot() ? 
slot() : -1, attrs); + if (attrs) { int first = 1; fputs("(", fp); @@ -299,7 +298,7 @@ Shape::dump(JSContext *cx, FILE *fp) const fputs(") ", fp); } - fprintf(fp, "shortid %d\n", shortid); + fprintf(fp, "shortid %d\n", shortid()); } void @@ -307,8 +306,8 @@ Shape::dumpSubtree(JSContext *cx, int level, FILE *fp) const { if (!parent) { JS_ASSERT(level == 0); - JS_ASSERT(JSID_IS_EMPTY(propid)); - fprintf(fp, "class %s emptyShape %u\n", clasp->name, shapeid); + JS_ASSERT(JSID_IS_EMPTY(propid_)); + fprintf(fp, "class %s emptyShape\n", getObjectClass()->name); } else { fprintf(fp, "%*sid ", level, ""); dump(cx, fp); @@ -357,6 +356,7 @@ js::PropertyTree::dumpShapes(JSContext *cx) fprintf(dumpfp, "*** Compartment %p ***\n", (void *)c.get()); + /* typedef JSCompartment::EmptyShapeSet HS; HS &h = c->emptyShapes; for (HS::Range r = h.all(); !r.empty(); r.popFront()) { @@ -364,6 +364,7 @@ js::PropertyTree::dumpShapes(JSContext *cx) empty->dumpSubtree(cx, 0, dumpfp); putc('\n', dumpfp); } + */ } } #endif diff --git a/js/src/jsproxy.cpp b/js/src/jsproxy.cpp index 142f8864ffef..68b751a73aca 100644 --- a/js/src/jsproxy.cpp +++ b/js/src/jsproxy.cpp @@ -67,7 +67,7 @@ GetCall(JSObject *proxy) static inline Value GetConstruct(JSObject *proxy) { - if (proxy->numSlots() <= JSSLOT_PROXY_CONSTRUCT) + if (proxy->slotSpan() <= JSSLOT_PROXY_CONSTRUCT) return UndefinedValue(); return proxy->getSlot(JSSLOT_PROXY_CONSTRUCT); } @@ -76,7 +76,7 @@ static inline const HeapValue & GetFunctionProxyConstruct(JSObject *proxy) { JS_ASSERT(IsFunctionProxy(proxy)); - JS_ASSERT(proxy->numSlots() > JSSLOT_PROXY_CONSTRUCT); + JS_ASSERT(proxy->slotSpan() > JSSLOT_PROXY_CONSTRUCT); return proxy->getSlotRef(JSSLOT_PROXY_CONSTRUCT); } @@ -1475,11 +1475,11 @@ js::NewProxyObject(JSContext *cx, ProxyHandler *handler, const Value &priv, JSOb * their properties and so that we don't need to walk the compartment if * their prototype changes later. */ - if (proto) - proto->getNewType(cx, NULL, /* markUnknown = */ true); + if (proto && !proto->setNewTypeUnknown(cx)) + return NULL; - JSObject *obj = NewNonFunction(cx, clasp, proto, parent); - if (!obj || !obj->ensureInstanceReservedSlots(cx, 0)) + JSObject *obj = NewObjectWithGivenProto(cx, clasp, proto, parent); + if (!obj) return NULL; obj->setSlot(JSSLOT_PROXY_HANDLER, PrivateValue(handler)); obj->setSlot(JSSLOT_PROXY_PRIVATE, priv); @@ -1658,7 +1658,7 @@ callable_Construct(JSContext *cx, uintN argc, Value *vp) return false; } - JSObject *newobj = NewNativeClassInstance(cx, &ObjectClass, proto, proto->getParent()); + JSObject *newobj = NewObjectWithGivenProto(cx, &ObjectClass, proto, NULL); if (!newobj) return false; @@ -1725,7 +1725,7 @@ js::FixProxy(JSContext *cx, JSObject *proxy, JSBool *bp) * number of fixed slots as the proxy so that we can swap their contents. 
*/ gc::AllocKind kind = proxy->getAllocKind(); - JSObject *newborn = NewNonFunction(cx, clasp, proto, parent, kind); + JSObject *newborn = NewObjectWithGivenProto(cx, clasp, proto, parent, kind); if (!newborn) return false; AutoObjectRooter tvr2(cx, newborn); @@ -1766,7 +1766,7 @@ Class js::ProxyClass = { JS_FRIEND_API(JSObject *) js_InitProxyClass(JSContext *cx, JSObject *obj) { - JSObject *module = NewNonFunction(cx, &ProxyClass, NULL, obj); + JSObject *module = NewObjectWithClassProto(cx, &ProxyClass, NULL, obj); if (!module || !module->setSingletonType(cx)) return NULL; diff --git a/js/src/jsprvtd.h b/js/src/jsprvtd.h index 05230ff6b6e0..54d2568ee495 100644 --- a/js/src/jsprvtd.h +++ b/js/src/jsprvtd.h @@ -117,7 +117,6 @@ class JSWrapper; namespace js { struct ArgumentsData; -struct FlatClosureData; struct Class; class RegExpObject; @@ -218,8 +217,11 @@ class LifoAlloc; class PropertyCache; struct PropertyCacheEntry; +class BaseShape; +class UnownedBaseShape; struct Shape; struct EmptyShape; +class ShapeKindArray; class Bindings; class MultiDeclRange; diff --git a/js/src/jspubtd.h b/js/src/jspubtd.h index d831b4032412..5935e2e6f752 100644 --- a/js/src/jspubtd.h +++ b/js/src/jspubtd.h @@ -201,6 +201,7 @@ typedef enum { JSTRACE_XML, #endif JSTRACE_SHAPE, + JSTRACE_BASE_SHAPE, JSTRACE_TYPE_OBJECT, JSTRACE_LAST = JSTRACE_TYPE_OBJECT } JSGCTraceKind; diff --git a/js/src/jsreflect.cpp b/js/src/jsreflect.cpp index 774f6c704ea4..3d0505adfd72 100644 --- a/js/src/jsreflect.cpp +++ b/js/src/jsreflect.cpp @@ -321,7 +321,7 @@ class NodeBuilder } bool newObject(JSObject **dst) { - JSObject *nobj = NewNonFunction(cx, &ObjectClass, NULL, NULL); + JSObject *nobj = NewBuiltinClassInstance(cx, &ObjectClass); if (!nobj) return false; @@ -631,7 +631,7 @@ NodeBuilder::newNode(ASTType type, TokenPos *pos, JSObject **dst) Value tv; - JSObject *node = NewNonFunction(cx, &ObjectClass, NULL, NULL); + JSObject *node = NewBuiltinClassInstance(cx, &ObjectClass); if (!node || !setNodeLoc(node, pos) || !atomValue(nodeTypeNames[type], &tv) || @@ -3264,7 +3264,7 @@ JS_BEGIN_EXTERN_C JS_PUBLIC_API(JSObject *) JS_InitReflect(JSContext *cx, JSObject *obj) { - JSObject *Reflect = NewNonFunction(cx, &ObjectClass, NULL, obj); + JSObject *Reflect = NewObjectWithClassProto(cx, &ObjectClass, NULL, obj); if (!Reflect || !Reflect->setSingletonType(cx)) return NULL; diff --git a/js/src/jsscope.cpp b/js/src/jsscope.cpp index 0fed411ca8df..223bed7ce537 100644 --- a/js/src/jsscope.cpp +++ b/js/src/jsscope.cpp @@ -66,68 +66,6 @@ using namespace js; using namespace js::gc; -uint32 -js_GenerateShape(JSRuntime *rt) -{ - uint32 shape; - - shape = JS_ATOMIC_INCREMENT(&rt->shapeGen); - JS_ASSERT(shape != 0); - if (shape >= SHAPE_OVERFLOW_BIT) { - /* - * FIXME bug 440834: The shape id space has overflowed. Currently we - * cope badly with this and schedule the GC on the every call. But - * first we make sure that increments from other threads would not - * have a chance to wrap around shapeGen to zero. 
- */ - rt->shapeGen = SHAPE_OVERFLOW_BIT; - shape = SHAPE_OVERFLOW_BIT; - -#ifdef JS_THREADSAFE - AutoLockGC lockIf(rt); -#endif - TriggerGC(rt, gcstats::SHAPE); - } - return shape; -} - -uint32 -js_GenerateShape(JSContext *cx) -{ - return js_GenerateShape(cx->runtime); -} - -bool -JSObject::ensureClassReservedSlotsForEmptyObject(JSContext *cx) -{ - JS_ASSERT(nativeEmpty()); - - /* - * Subtle rule: objects that call JSObject::ensureInstanceReservedSlots - * must either: - * - * (a) never escape anywhere an ad-hoc property could be set on them; or - * - * (b) protect their instance-reserved slots with shapes, at least a custom - * empty shape with the right slotSpan member. - * - * Block objects are the only objects that fall into category (a). While - * Call objects cannot escape, they can grow ad-hoc properties via eval - * of a var declaration, or due to a function statement being evaluated, - * but they have slots mapped by compiler-created shapes, and thus (b) no - * problem predicting first ad-hoc property slot. Bound Function objects - * have a custom empty shape. - * - * (Note that Block, Call, and bound Function objects are the only native - * class objects that are allowed to call ensureInstanceReservedSlots.) - */ - uint32 nfixed = JSSLOT_FREE(getClass()); - if (nfixed > numSlots() && !allocSlots(cx, nfixed)) - return false; - - return true; -} - bool PropertyTable::init(JSRuntime *rt, Shape *lastProp) { @@ -153,7 +91,7 @@ PropertyTable::init(JSRuntime *rt, Shape *lastProp) hashShift = JS_DHASH_BITS - sizeLog2; for (Shape::Range r = lastProp->all(); !r.empty(); r.popFront()) { const Shape &shape = r.front(); - Shape **spp = search(shape.propid, true); + Shape **spp = search(shape.propid(), true); /* * Beware duplicate args and arg vs. var conflicts: the youngest shape @@ -165,10 +103,51 @@ PropertyTable::init(JSRuntime *rt, Shape *lastProp) return true; } +bool +Shape::makeOwnBaseShape(JSContext *cx) +{ + JS_ASSERT(!base()->isOwned()); + + BaseShape *nbase = js_NewGCBaseShape(cx); + if (!nbase) + return false; + + new (nbase) BaseShape(*base()); + nbase->setOwned(base()->toUnowned()); + + this->base_ = nbase; + + return true; +} + +void +Shape::handoffTableTo(Shape *shape) +{ + JS_ASSERT(inDictionary() && shape->inDictionary()); + + if (this == shape) + return; + + JS_ASSERT(base()->isOwned() && !shape->base()->isOwned()); + + BaseShape *nbase = base(); + + JS_ASSERT_IF(shape->hasSlot(), nbase->slotSpan() > shape->slot()); + + this->base_ = nbase->baseUnowned(); + nbase->adoptUnowned(shape->base()->toUnowned()); + + shape->base_ = nbase; +} + bool Shape::hashify(JSContext *cx) { JS_ASSERT(!hasTable()); + + if (!ensureOwnBaseShape(cx)) + return false; + JSRuntime *rt = cx->runtime; PropertyTable *table = rt->new_(entryCount()); if (!table) @@ -179,7 +158,7 @@ Shape::hashify(JSContext *cx) return false; } - setTable(table); + base()->setTable(table); return true; } @@ -199,7 +178,7 @@ PropertyTable::search(jsid id, bool adding) uint32 sizeMask; JS_ASSERT(entries); - JS_ASSERT(!JSID_IS_VOID(id)); + JS_ASSERT(!JSID_IS_EMPTY(id)); /* Compute the primary hash address. */ hash0 = HashId(id); @@ -213,7 +192,7 @@ PropertyTable::search(jsid id, bool adding) /* Hit: return entry. */ shape = SHAPE_CLEAR_COLLISION(stored); - if (shape && shape->propid.get() == id) + if (shape && shape->propid() == id) return spp; /* Collision: double hash. */ @@ -247,7 +226,7 @@ PropertyTable::search(jsid id, bool adding) return (adding && firstRemoved) ? 
firstRemoved : spp; shape = SHAPE_CLEAR_COLLISION(stored); - if (shape && shape->propid.get() == id) { + if (shape && shape->propid() == id) { JS_ASSERT(collision_flag); return spp; } @@ -294,7 +273,7 @@ PropertyTable::change(int log2Delta, JSContext *cx) for (Shape **oldspp = oldTable; oldsize != 0; oldspp++) { Shape *shape = SHAPE_FETCH(oldspp); if (shape) { - Shape **spp = search(shape->propid, true); + Shape **spp = search(shape->propid(), true); JS_ASSERT(SHAPE_IS_FREE(*spp)); *spp = shape; } @@ -321,90 +300,80 @@ PropertyTable::grow(JSContext *cx) return true; } -void -Shape::update(js::PropertyOp getter, js::StrictPropertyOp setter, uint8 attrs) -{ - if (hasGetterValue()) - JSObject::writeBarrierPre(getterObject()); - if (hasSetterValue()) - JSObject::writeBarrierPre(setterObject()); - - this->rawGetter = getter; - this->rawSetter = setter; - this->attrs = attrs; - - if (hasGetterValue()) - JSObject::writeBarrierPost(getterObject(), this); - if (hasSetterValue()) - JSObject::writeBarrierPost(setterObject(), this); -} - Shape * -Shape::getChild(JSContext *cx, const js::Shape &child, HeapPtr *listp) +Shape::getChildBinding(JSContext *cx, const js::Shape &child, HeapPtrShape *lastBinding) { - JS_ASSERT(!JSID_IS_VOID(child.propid)); + JS_ASSERT(!inDictionary()); JS_ASSERT(!child.inDictionary()); - if (inDictionary()) { - Shape *oldShape = *listp; - PropertyTable *table = (oldShape && oldShape->hasTable()) ? oldShape->getTable() : NULL; - - /* - * Attempt to grow table if needed before extending *listp, rather than - * risking OOM under table->grow after newDictionaryShape succeeds, and - * then have to fix up *listp. - */ - if (table && table->needsToGrow() && !table->grow(cx)) - return NULL; - - if (newDictionaryShape(cx, child, listp)) { - Shape *newShape = *listp; - - JS_ASSERT(oldShape == newShape->parent); - if (table) { - /* Add newShape to the property table. */ - Shape **spp = table->search(newShape->propid, true); - - /* - * Beware duplicate formal parameters, allowed by ECMA-262 in - * non-strict mode. Otherwise we know that Bindings::add (our - * caller) won't pass an id already in the table to us. In the - * case of duplicate formals, the last one wins, so while we - * must not overcount entries, we must store newShape. - */ - if (!SHAPE_FETCH(spp)) - ++table->entryCount; - SHAPE_STORE_PRESERVING_COLLISION(spp, newShape); - - /* Hand the table off from oldShape to newShape. */ - oldShape->setTable(NULL); - newShape->setTable(table); - } else { - if (!newShape->hasTable()) - newShape->hashify(cx); - } - return newShape; - } - - return NULL; - } - - if ((*listp)->entryCount() >= PropertyTree::MAX_HEIGHT) { - Shape *dprop = Shape::newDictionaryList(cx, listp); - if (!dprop) - return NULL; - return dprop->getChild(cx, child, listp); - } - Shape *shape = JS_PROPERTY_TREE(cx).getChild(cx, this, child); if (shape) { JS_ASSERT(shape->parent == this); - JS_ASSERT(this == *listp); - *listp = shape; + JS_ASSERT(this == *lastBinding); + *lastBinding = shape; + + /* + * Update the number of fixed slots which bindings of this shape will + * have. Bindings are constructed as new properties come in, so the + * call object allocation class is not known ahead of time. Compute + * the fixed slot count here, which will feed into call objects created + * off of the bindings. + */ + uint32 slots = child.slotSpan() + 1; /* Add one for private data. 
*/ + gc::AllocKind kind = gc::GetGCObjectKind(slots); + + /* + * Make sure that the arguments and variables in the call object all + * end up in a contiguous range of slots. We need this to be able to + * embed the args/vars arrays in the TypeScriptNesting for the function + * after the call object's frame has finished. + */ + uint32 nfixed = gc::GetGCKindSlots(kind); + if (nfixed < slots) { + nfixed = CallObject::RESERVED_SLOTS + 1; + JS_ASSERT(gc::GetGCKindSlots(gc::GetGCObjectKind(nfixed)) == CallObject::RESERVED_SLOTS + 1); + } + + shape->setNumFixedSlots(nfixed - 1); } return shape; } +/* static */ bool +Shape::replaceLastProperty(JSContext *cx, const BaseShape &base, JSObject *proto, HeapPtrShape *lastp) +{ + Shape *shape = *lastp; + JS_ASSERT(!shape->inDictionary()); + + if (!shape->parent) { + /* Treat as resetting the initial property of the shape hierarchy. */ + AllocKind kind = gc::GetGCObjectKind(shape->numFixedSlots()); + Shape *newShape = + EmptyShape::getInitialShape(cx, base.clasp, proto, + base.parent, kind, + base.flags & BaseShape::OBJECT_FLAG_MASK); + if (!newShape) + return false; + JS_ASSERT(newShape->numFixedSlots() == shape->numFixedSlots()); + *lastp = newShape; + return true; + } + + BaseShape *nbase = BaseShape::getUnowned(cx, base); + if (!nbase) + return false; + + Shape child(shape); + child.base_ = nbase; + + Shape *newShape = JS_PROPERTY_TREE(cx).getChild(cx, shape->parent, child); + if (!newShape) + return false; + + *lastp = newShape; + return true; +} + /* * Get or create a property-tree or dictionary child property of parent, which * must be lastProp if inDictionaryMode(), else parent must be one of lastProp @@ -413,71 +382,54 @@ Shape::getChild(JSContext *cx, const js::Shape &child, HeapPtr *listp) Shape * JSObject::getChildProperty(JSContext *cx, Shape *parent, Shape &child) { - JS_ASSERT(!JSID_IS_VOID(child.propid)); - JS_ASSERT(!child.inDictionary()); - /* - * Shared properties have no slot. Unshared properties allocate a slot here - * but may lose it due to a JS_ClearScope call. + * Shared properties have no slot, but slot_ will reflect that of parent. + * Unshared properties allocate a slot here but may lose it due to a + * JS_ClearScope call. */ - if (child.attrs & JSPROP_SHARED) { - child.slot = SHAPE_INVALID_SLOT; + if (!child.hasSlot()) { + child.setSlot(parent->maybeSlot()); } else { - /* - * We may have set slot from a nearly-matching shape, above. If so, - * we're overwriting that nearly-matching shape, so we can reuse its - * slot -- we don't need to allocate a new one. Similarly, we use a - * specific slot if provided by the caller. - */ - if (child.slot == SHAPE_INVALID_SLOT && !allocSlot(cx, &child.slot)) - return NULL; + if (child.hasMissingSlot()) { + uint32 slot; + if (!allocSlot(cx, &slot)) + return NULL; + child.setSlot(slot); + } else { + /* Slots can only be allocated out of order on objects in dictionary mode. 
*/ + JS_ASSERT(inDictionaryMode() || + parent->hasMissingSlot() || + child.slot() == parent->maybeSlot() + 1); + } } Shape *shape; if (inDictionaryMode()) { - JS_ASSERT(parent == lastProp); - if (parent->frozen()) { - parent = Shape::newDictionaryList(cx, &lastProp); - if (!parent) - return NULL; - JS_ASSERT(!parent->frozen()); - } - shape = Shape::newDictionaryShape(cx, child, &lastProp); + JS_ASSERT(parent == lastProperty()); + shape = js_NewGCShape(cx); if (!shape) return NULL; + if (child.hasSlot() && child.slot() >= lastProperty()->base()->slotSpan()) { + if (!setSlotSpan(cx, child.slot() + 1)) + return NULL; + } + shape->initDictionaryShape(child, &shape_); } else { shape = JS_PROPERTY_TREE(cx).getChild(cx, parent, child); if (!shape) return NULL; JS_ASSERT(shape->parent == parent); - JS_ASSERT_IF(parent != lastProp, parent == lastProp->parent); - setLastProperty(shape); + JS_ASSERT_IF(parent != lastProperty(), parent == lastProperty()->parent); + if (!setLastProperty(cx, shape)) + return NULL; } - updateFlags(shape); - updateShape(cx); return shape; } Shape * -Shape::newDictionaryShape(JSContext *cx, const Shape &child, HeapPtr *listp) -{ - Shape *dprop = JS_PROPERTY_TREE(cx).newShape(cx); - if (!dprop) - return NULL; - - new (dprop) Shape(child.propid, child.getter(), child.setter(), child.slot, child.attrs, - (child.flags & ~FROZEN) | IN_DICTIONARY, child.shortid, - js_GenerateShape(cx), child.slotSpan); - - dprop->listp = NULL; - dprop->insertIntoDictionary(listp); - return dprop; -} - -Shape * -Shape::newDictionaryList(JSContext *cx, HeapPtr *listp) +Shape::newDictionaryList(JSContext *cx, HeapPtrShape *listp) { Shape *shape = *listp; Shape *list = shape; @@ -491,13 +443,14 @@ Shape::newDictionaryList(JSContext *cx, HeapPtr *listp) HeapPtrShape *childp = &root; while (shape) { - JS_ASSERT_IF(!shape->frozen(), !shape->inDictionary()); + JS_ASSERT(!shape->inDictionary()); - Shape *dprop = Shape::newDictionaryShape(cx, *shape, childp); + Shape *dprop = js_NewGCShape(cx); if (!dprop) { *listp = list; return NULL; } + dprop->initDictionaryShape(*shape, childp); JS_ASSERT(!dprop->hasTable()); childp = &dprop->parent; @@ -519,10 +472,27 @@ JSObject::toDictionaryMode(JSContext *cx) /* We allocate the shapes from cx->compartment, so make sure it's right. */ JS_ASSERT(compartment() == cx->compartment); - if (!Shape::newDictionaryList(cx, &lastProp)) + + uint32 span = slotSpan(); + + /* + * Clone the shapes into a new dictionary list. Don't update the + * last property of this object until done, otherwise a GC + * triggered while creating the dictionary will get the wrong + * slot span for this object. + */ + HeapPtrShape last; + last.init(lastProperty()); + if (!Shape::newDictionaryList(cx, &last)) return false; - clearOwnShape(); + JS_ASSERT(last->listp == &last); + last->listp = &shape_; + shape_ = last; + + JS_ASSERT(lastProperty()->hasTable()); + lastProperty()->base()->setSlotSpan(span); + return true; } @@ -541,10 +511,10 @@ NormalizeGetterAndSetter(JSContext *cx, JSObject *obj, setter = NULL; } if (flags & Shape::METHOD) { - /* Here, getter is the method, a function object reference. 
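toDictionaryMode() and initDictionaryShape() above put dictionary properties on a doubly linked list in which each shape's listp points back at whatever field currently refers to that shape, either the object's shape_ or the next-younger shape's parent link. Below is a tiny self-contained model of that pointer-to-pointer scheme; the struct and function names are illustrative, not SpiderMonkey's.

#include <cassert>
#include <stddef.h>

// Each node's `listp` holds the address of the pointer that refers to it
// (the list head or the previous node's `parent`), so a node can unlink
// itself in O(1) without knowing the list head.
struct DictNode {
    DictNode  *parent;   // next-older property in the dictionary
    DictNode **listp;    // &head, or &newerNode->parent

    void insertAtHead(DictNode **head) {
        parent = *head;
        if (parent)
            parent->listp = &parent;   // old head is now pointed at by us
        listp = head;
        *head = this;
    }

    void removeFromList() {
        *listp = parent;               // whoever pointed at us now skips us
        if (parent)
            parent->listp = listp;
        parent = NULL;
        listp = NULL;
    }
};

int main() {
    DictNode *head = NULL;
    DictNode a, b, c;
    a.insertAtHead(&head);    // list: a
    b.insertAtHead(&head);    // list: b -> a
    c.insertAtHead(&head);    // list: c -> b -> a

    b.removeFromList();       // list: c -> a
    assert(head == &c && c.parent == &a && a.parent == NULL);
    assert(a.listp == &c.parent);
    return 0;
}

This is why the patch can assert shape->listp == &prev->parent in checkShapeConsistency(), and why toDictionaryMode() can hand the freshly cloned list to the object by rewriting a single pointer.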
*/ - JS_ASSERT(getter); + JS_ASSERT_IF(getter, getter == JS_PropertyStub); JS_ASSERT(!setter); JS_ASSERT(!(attrs & (JSPROP_GETTER | JSPROP_SETTER))); + getter = NULL; } else { if (getter == JS_PropertyStub) { JS_ASSERT(!(attrs & JSPROP_GETTER)); @@ -572,58 +542,49 @@ JSObject::checkShapeConsistency() return; JS_ASSERT(isNative()); - if (hasOwnShape()) - JS_ASSERT(objShape != lastProp->shapeid); - else - JS_ASSERT(objShape == lastProp->shapeid); - Shape *shape = lastProp; + Shape *shape = lastProperty(); Shape *prev = NULL; if (inDictionaryMode()) { - if (shape->hasTable()) { - PropertyTable *table = shape->getTable(); - for (uint32 fslot = table->freelist; fslot != SHAPE_INVALID_SLOT; - fslot = getSlot(fslot).toPrivateUint32()) { - JS_ASSERT(fslot < shape->slotSpan); - } + JS_ASSERT(shape->hasTable()); - for (int n = throttle; --n >= 0 && shape->parent; shape = shape->parent) { - JS_ASSERT_IF(shape != lastProp, !shape->hasTable()); - - Shape **spp = table->search(shape->propid, false); - JS_ASSERT(SHAPE_FETCH(spp) == shape); - } - } else { - shape = shape->parent; - for (int n = throttle; --n >= 0 && shape; shape = shape->parent) - JS_ASSERT(!shape->hasTable()); + PropertyTable &table = shape->table(); + for (uint32 fslot = table.freelist; fslot != SHAPE_INVALID_SLOT; + fslot = getSlot(fslot).toPrivateUint32()) { + JS_ASSERT(fslot < slotSpan()); } - shape = lastProp; + for (int n = throttle; --n >= 0 && shape->parent; shape = shape->parent) { + JS_ASSERT_IF(shape != lastProperty(), !shape->hasTable()); + + Shape **spp = table.search(shape->propid(), false); + JS_ASSERT(SHAPE_FETCH(spp) == shape); + } + + shape = lastProperty(); for (int n = throttle; --n >= 0 && shape; shape = shape->parent) { - JS_ASSERT_IF(shape->slot != SHAPE_INVALID_SLOT, shape->slot < shape->slotSpan); + JS_ASSERT_IF(shape->slot() != SHAPE_INVALID_SLOT, shape->slot() < slotSpan()); if (!prev) { - JS_ASSERT(shape == lastProp); - JS_ASSERT(shape->listp == &lastProp); + JS_ASSERT(shape == lastProperty()); + JS_ASSERT(shape->listp == &shape_); } else { JS_ASSERT(shape->listp == &prev->parent); - JS_ASSERT(prev->slotSpan >= shape->slotSpan); } prev = shape; } } else { for (int n = throttle; --n >= 0 && shape->parent; shape = shape->parent) { if (shape->hasTable()) { - PropertyTable *table = shape->getTable(); + PropertyTable &table = shape->table(); JS_ASSERT(shape->parent); for (Shape::Range r(shape); !r.empty(); r.popFront()) { - Shape **spp = table->search(r.front().propid, false); + Shape **spp = table.search(r.front().propid(), false); JS_ASSERT(SHAPE_FETCH(spp) == &r.front()); } } if (prev) { - JS_ASSERT(prev->slotSpan >= shape->slotSpan); + JS_ASSERT(prev->maybeSlot() >= shape->maybeSlot()); shape->kids.checkConsistency(prev); } prev = shape; @@ -634,11 +595,11 @@ JSObject::checkShapeConsistency() # define CHECK_SHAPE_CONSISTENCY(obj) ((void)0) #endif -const Shape * +Shape * JSObject::addProperty(JSContext *cx, jsid id, PropertyOp getter, StrictPropertyOp setter, uint32 slot, uintN attrs, - uintN flags, intN shortid) + uintN flags, intN shortid, bool allowDictionary) { JS_ASSERT(!JSID_IS_VOID(id)); @@ -652,28 +613,34 @@ JSObject::addProperty(JSContext *cx, jsid id, /* Search for id with adding = true in order to claim its entry. 
*/ Shape **spp = nativeSearch(cx, id, true); JS_ASSERT(!SHAPE_FETCH(spp)); - return addPropertyInternal(cx, id, getter, setter, slot, attrs, flags, shortid, spp); + return addPropertyInternal(cx, id, getter, setter, slot, attrs, flags, shortid, spp, allowDictionary); } -const Shape * +Shape * JSObject::addPropertyInternal(JSContext *cx, jsid id, PropertyOp getter, StrictPropertyOp setter, uint32 slot, uintN attrs, uintN flags, intN shortid, - Shape **spp) + Shape **spp, bool allowDictionary) { - JS_ASSERT_IF(inDictionaryMode(), !lastProp->frozen()); + JS_ASSERT_IF(!allowDictionary, !inDictionaryMode()); PropertyTable *table = NULL; if (!inDictionaryMode()) { - if (lastProp->entryCount() >= PropertyTree::MAX_HEIGHT) { + bool stableSlot = + (slot == SHAPE_INVALID_SLOT) || + lastProperty()->hasMissingSlot() || + (slot == lastProperty()->maybeSlot() + 1); + JS_ASSERT_IF(!allowDictionary, stableSlot); + if (allowDictionary && + (!stableSlot || lastProperty()->entryCount() >= PropertyTree::MAX_HEIGHT)) { if (!toDictionaryMode(cx)) return NULL; spp = nativeSearch(cx, id, true); - table = lastProp->getTable(); + table = &lastProperty()->table(); } - } else if (lastProp->hasTable()) { - table = lastProp->getTable(); + } else if (lastProperty()->hasTable()) { + table = &lastProperty()->table(); if (table->needsToGrow()) { if (!table->grow(cx)) return NULL; @@ -683,25 +650,38 @@ JSObject::addPropertyInternal(JSContext *cx, jsid id, } } + if (!maybeSetIndexed(cx, id)) + return NULL; + /* Find or create a property tree node labeled by our arguments. */ - const Shape *shape; + Shape *shape; { - Shape child(id, getter, setter, slot, attrs, flags, shortid); - shape = getChildProperty(cx, lastProp, child); + BaseShape *nbase; + if (lastProperty()->base()->matchesGetterSetter(getter, setter)) { + nbase = lastProperty()->base(); + } else { + BaseShape base(getClass(), getParent(), lastProperty()->getObjectFlags(), + attrs, getter, setter); + nbase = BaseShape::getUnowned(cx, base); + if (!nbase) + return NULL; + } + + Shape child(nbase, id, slot, numFixedSlots(), attrs, flags, shortid); + shape = getChildProperty(cx, lastProperty(), child); } if (shape) { - JS_ASSERT(shape == lastProp); + JS_ASSERT(shape == lastProperty()); if (table) { /* Store the tree node pointer in the table entry for id. */ SHAPE_STORE_PRESERVING_COLLISION(spp, shape); ++table->entryCount; - /* Pass the table along to the new lastProp, namely shape. */ - JS_ASSERT(shape->parent->getTable() == table); - shape->parent->setTable(NULL); - shape->setTable(table); + /* Pass the table along to the new last property, namely shape. */ + JS_ASSERT(&shape->parent->table() == table); + shape->parent->handoffTableTo(shape); } CHECK_SHAPE_CONSISTENCY(this); @@ -729,14 +709,14 @@ CheckCanChangeAttrs(JSContext *cx, JSObject *obj, const Shape *shape, uintN *att /* Reject attempts to remove a slot from the permanent data property. */ if (shape->isDataDescriptor() && shape->hasSlot() && (*attrsp & (JSPROP_GETTER | JSPROP_SETTER | JSPROP_SHARED))) { - obj->reportNotConfigurable(cx, shape->propid); + obj->reportNotConfigurable(cx, shape->propid()); return false; } return true; } -const Shape * +Shape * JSObject::putProperty(JSContext *cx, jsid id, PropertyOp getter, StrictPropertyOp setter, uint32 slot, uintN attrs, @@ -744,16 +724,6 @@ JSObject::putProperty(JSContext *cx, jsid id, { JS_ASSERT(!JSID_IS_VOID(id)); - /* - * Horrid non-strict eval, debuggers, and |default xml namespace ...| may - * extend Call objects. 
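addPropertyInternal() above stays on the shared property tree only while slots are handed out strictly in order and the lineage stays below PropertyTree::MAX_HEIGHT; otherwise the object is switched to its own dictionary. A rough sketch of that decision predicate follows; the constants and struct are made up for illustration and the allowDictionary flag is ignored.

#include <cassert>
#include <stddef.h>
#include <stdint.h>

static const size_t MAX_HEIGHT = 128;            // assumed lineage cap
static const uint32_t INVALID_SLOT = 0xffffffu;  // assumed "no slot" marker

struct LastProperty {
    uint32_t slot;        // INVALID_SLOT if the last property is slotless
    size_t entryCount;    // properties accumulated on the lineage so far
};

// Slots appended on the shared tree must extend the span by exactly one.
inline bool slotIsStable(const LastProperty &last, uint32_t newSlot) {
    return newSlot == INVALID_SLOT ||
           last.slot == INVALID_SLOT ||
           newSlot == last.slot + 1;
}

inline bool shouldGoDictionary(const LastProperty &last, uint32_t newSlot) {
    return !slotIsStable(last, newSlot) || last.entryCount >= MAX_HEIGHT;
}

int main() {
    LastProperty last = { 2, 3 };
    assert(!shouldGoDictionary(last, 3));        // in-order append: keep sharing
    assert(shouldGoDictionary(last, 7));         // out-of-order slot: go dictionary
    LastProperty tall = { 2, MAX_HEIGHT };
    assert(shouldGoDictionary(tall, 3));         // lineage too tall: go dictionary
    return 0;
}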
- */ - if (lastProp->frozen()) { - if (!Shape::newDictionaryList(cx, &lastProp)) - return NULL; - JS_ASSERT(!lastProp->frozen()); - } - NormalizeGetterAndSetter(cx, this, id, attrs, flags, getter, setter); /* Search for id in order to claim its entry if table has been allocated. */ @@ -769,7 +739,7 @@ JSObject::putProperty(JSContext *cx, jsid id, return NULL; } - return addPropertyInternal(cx, id, getter, setter, slot, attrs, flags, shortid, spp); + return addPropertyInternal(cx, id, getter, setter, slot, attrs, flags, shortid, spp, true); } /* Property exists: search must have returned a valid *spp. */ @@ -784,15 +754,24 @@ JSObject::putProperty(JSContext *cx, jsid id, * other members match. */ bool hadSlot = shape->hasSlot(); - uint32 oldSlot = shape->slot; + uint32 oldSlot = shape->maybeSlot(); if (!(attrs & JSPROP_SHARED) && slot == SHAPE_INVALID_SLOT && hadSlot) slot = oldSlot; + UnownedBaseShape *nbase; + { + BaseShape base(getClass(), getParent(), lastProperty()->getObjectFlags(), + attrs, getter, setter); + nbase = BaseShape::getUnowned(cx, base); + if (!nbase) + return NULL; + } + /* * Now that we've possibly preserved slot, check whether all members match. * If so, this is a redundant "put" and we can return without more work. */ - if (shape->matchesParamsAfterId(getter, setter, slot, attrs, flags, shortid)) + if (shape->matchesParamsAfterId(nbase, slot, attrs, flags, shortid)) return shape; /* @@ -800,80 +779,80 @@ JSObject::putProperty(JSContext *cx, jsid id, * The shape tree is shared immutable, and we can't removeProperty and then * addPropertyInternal because a failure under add would lose data. */ - if (shape != lastProp && !inDictionaryMode()) { + if (shape != lastProperty() && !inDictionaryMode()) { if (!toDictionaryMode(cx)) return NULL; - spp = nativeSearch(cx, shape->propid); + spp = nativeSearch(cx, shape->propid()); shape = SHAPE_FETCH(spp); } + JS_ASSERT_IF(shape->hasSlot() && !(attrs & JSPROP_SHARED), shape->slot() == slot); + /* - * Now that we have passed the lastProp->frozen() check at the top of this - * method, and the non-last-property conditioning just above, we are ready - * to overwrite. - * - * Optimize the case of a non-frozen dictionary-mode object based on the - * property that dictionaries exclusively own their mutable shape structs, - * each of which has a unique shape number (not shared via a shape tree). + * Optimize the case of a dictionary-mode object based on the property that + * dictionaries exclusively own their mutable shape structs, each of which + * has a unique shape (not shared via a shape tree). We can update the + * shape in place, though after each modification we need to generate a new + * last property to invalidate shape guards. * * This is more than an optimization: it is required to preserve for-in * enumeration order (see bug 601399). */ if (inDictionaryMode()) { + bool updateLast = (shape == lastProperty()); + if (!generateOwnShape(cx)) + return NULL; + if (updateLast) + shape = lastProperty(); + /* FIXME bug 593129 -- slot allocation and JSObject *this must move out of here! 
*/ if (slot == SHAPE_INVALID_SLOT && !(attrs & JSPROP_SHARED)) { if (!allocSlot(cx, &slot)) return NULL; } - shape->slot = slot; - if (slot != SHAPE_INVALID_SLOT && slot >= shape->slotSpan) { - shape->slotSpan = slot + 1; + if (shape == lastProperty()) + shape->base()->adoptUnowned(nbase); + else + shape->base_ = nbase; - for (Shape *temp = lastProp; temp != shape; temp = temp->parent) { - if (temp->slotSpan <= slot) - temp->slotSpan = slot + 1; - } - } - - shape->update(getter, setter, uint8(attrs)); + shape->setSlot(slot); + shape->attrs = uint8(attrs); shape->flags = flags | Shape::IN_DICTIONARY; - shape->shortid = int16(shortid); + shape->shortid_ = int16(shortid); /* - * We are done updating shape and lastProp. Now we may need to update - * flags and we will need to update objShape, which is no longer "own". - * In the last non-dictionary property case in the else clause just - * below, getChildProperty handles this for us. First update flags. + * We are done updating shape and the last property. Now we may need to + * update flags. In the last non-dictionary property case in the else + * clause just below, getChildProperty handles this for us. First update + * flags. */ - updateFlags(shape); - - /* - * We have just mutated shape in place, but nothing caches it based on - * shape->shape unless shape is lastProp and !hasOwnShape()). Therefore - * we regenerate only lastProp->shape. We will clearOwnShape(), which - * sets objShape to lastProp->shape. - */ - lastProp->shapeid = js_GenerateShape(cx); - clearOwnShape(); + jsuint index; + if (js_IdIsIndex(shape->propid(), &index)) + shape->base()->setObjectFlag(BaseShape::INDEXED); } else { /* - * Updating lastProp in a non-dictionary-mode object. Such objects - * share their shapes via a tree rooted at a prototype emptyShape, or - * perhaps a well-known compartment-wide singleton emptyShape. + * Updating the last property in a non-dictionary-mode object. Such + * objects share their shapes via a tree rooted at a prototype + * emptyShape, or perhaps a well-known compartment-wide singleton + * emptyShape. * * If any shape in the tree has a property hashtable, it is shared and * immutable too, therefore we must not update *spp. */ - JS_ASSERT(shape == lastProp); - removeLastProperty(); + BaseShape base(getClass(), getParent(), lastProperty()->getObjectFlags(), + attrs, getter, setter); + BaseShape *nbase = BaseShape::getUnowned(cx, base); + if (!nbase) + return NULL; + + JS_ASSERT(shape == lastProperty()); /* Find or create a property tree node labeled by our arguments. */ - Shape child(id, getter, setter, slot, attrs, flags, shortid); + Shape child(nbase, id, slot, numFixedSlots(), attrs, flags, shortid); + Shape *newShape = getChildProperty(cx, shape->parent, child); - Shape *newShape = getChildProperty(cx, lastProp, child); if (!newShape) { - setLastProperty(shape); CHECK_SHAPE_CONSISTENCY(this); return NULL; } @@ -884,14 +863,12 @@ JSObject::putProperty(JSContext *cx, jsid id, /* * Can't fail now, so free the previous incarnation's slot if the new shape * has no slot. But we do not need to free oldSlot (and must not, as trying - * to will botch an assertion in JSObject::freeSlot) if the new lastProp - * (shape here) has a slotSpan that does not cover it. + * to will botch an assertion in JSObject::freeSlot) if the new last + * property (shape here) has a slotSpan that does not cover it. 
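The slot handling above grows an object's slot span to slot + 1 whenever a property lands at or past the current span (as setSlotSpan() does earlier in the patch), and later frees only a slot that the span still covers. A toy model of just that rule, assuming nothing about the engine's actual slot storage:

#include <cassert>
#include <stddef.h>
#include <vector>

class SlotSpanSketch {
    std::vector<int> slots;
    size_t span;     // one past the highest slot any live property maps to
  public:
    SlotSpanSketch() : span(0) {}

    size_t slotSpan() const { return span; }

    void defineSlot(size_t slot, int value) {
        if (slot >= span)
            span = slot + 1;          // grow the span to cover the new slot
        if (slots.size() < span)
            slots.resize(span, 0);
        slots[slot] = value;
    }

    // Only a slot the span still covers is actually recycled.
    bool freeSlot(size_t slot) {
        if (slot >= span)
            return false;             // not covered by the span; nothing to free
        slots[slot] = 0;
        return true;
    }
};

int main() {
    SlotSpanSketch obj;
    obj.defineSlot(0, 10);
    obj.defineSlot(3, 40);            // a write past the span grows it to 4
    assert(obj.slotSpan() == 4);
    assert(obj.freeSlot(3));
    assert(!obj.freeSlot(7));         // never covered, so never freed
    return 0;
}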
*/ if (hadSlot && !shape->hasSlot()) { - if (oldSlot < shape->slotSpan) + if (oldSlot < slotSpan()) freeSlot(cx, oldSlot); - else - setSlot(oldSlot, UndefinedValue()); JS_ATOMIC_INCREMENT(&cx->runtime->propertyRemovals); } @@ -900,12 +877,10 @@ JSObject::putProperty(JSContext *cx, jsid id, return shape; } -const Shape * -JSObject::changeProperty(JSContext *cx, const Shape *shape, uintN attrs, uintN mask, +Shape * +JSObject::changeProperty(JSContext *cx, Shape *shape, uintN attrs, uintN mask, PropertyOp getter, StrictPropertyOp setter) { - JS_ASSERT_IF(inDictionaryMode(), !lastProp->frozen()); - JS_ASSERT(!JSID_IS_VOID(shape->propid)); JS_ASSERT(nativeContains(cx, *shape)); attrs |= shape->attrs & mask; @@ -914,12 +889,12 @@ JSObject::changeProperty(JSContext *cx, const Shape *shape, uintN attrs, uintN m JS_ASSERT(!((attrs ^ shape->attrs) & JSPROP_SHARED) || !(attrs & JSPROP_SHARED)); - /* Don't allow method properties to be changed to have a getter. */ - JS_ASSERT_IF(getter != shape->getter(), !shape->isMethod()); + /* Don't allow method properties to be changed to have a getter or setter. */ + JS_ASSERT_IF(shape->isMethod(), !getter && !setter); - types::MarkTypePropertyConfigured(cx, this, shape->propid); + types::MarkTypePropertyConfigured(cx, this, shape->propid()); if (attrs & (JSPROP_GETTER | JSPROP_SETTER)) - types::AddTypePropertyId(cx, this, shape->propid, types::Type::UnknownType()); + types::AddTypePropertyId(cx, this, shape->propid(), types::Type::UnknownType()); if (getter == JS_PropertyStub) getter = NULL; @@ -932,66 +907,14 @@ JSObject::changeProperty(JSContext *cx, const Shape *shape, uintN attrs, uintN m if (shape->attrs == attrs && shape->getter() == getter && shape->setter() == setter) return shape; - const Shape *newShape; - /* - * Dictionary-mode objects exclusively own their mutable shape structs, so - * we simply modify in place. + * Let JSObject::putProperty handle this |overwriting| case, including + * the conservation of shape->slot (if it's valid). We must not call + * removeProperty because it will free an allocated shape->slot, and + * putProperty won't re-allocate it. */ - if (inDictionaryMode()) { - /* FIXME bug 593129 -- slot allocation and JSObject *this must move out of here! */ - uint32 slot = shape->slot; - if (slot == SHAPE_INVALID_SLOT && !(attrs & JSPROP_SHARED)) { - if (!allocSlot(cx, &slot)) - return NULL; - } - - Shape *mutableShape = const_cast(shape); - mutableShape->slot = slot; - if (slot != SHAPE_INVALID_SLOT && slot >= shape->slotSpan) { - mutableShape->slotSpan = slot + 1; - - for (Shape *temp = lastProp; temp != shape; temp = temp->parent) { - if (temp->slotSpan <= slot) - temp->slotSpan = slot + 1; - } - } - - mutableShape->update(getter, setter, uint8(attrs)); - - updateFlags(shape); - - /* See the corresponding code in putProperty. */ - lastProp->shapeid = js_GenerateShape(cx); - clearOwnShape(); - - newShape = mutableShape; - } else if (shape == lastProp) { - Shape child(shape->propid, getter, setter, shape->slot, attrs, shape->flags, - shape->shortid); - - newShape = getChildProperty(cx, shape->parent, child); -#ifdef DEBUG - if (newShape) { - JS_ASSERT(newShape == lastProp); - if (newShape->hasTable()) { - Shape **spp = nativeSearch(cx, shape->propid); - JS_ASSERT(SHAPE_FETCH(spp) == newShape); - } - } -#endif - } else { - /* - * Let JSObject::putProperty handle this |overwriting| case, including - * the conservation of shape->slot (if it's valid). 
We must not call - * removeProperty because it will free an allocated shape->slot, and - * putProperty won't re-allocate it. - */ - Shape child(shape->propid, getter, setter, shape->slot, attrs, shape->flags, - shape->shortid); - newShape = putProperty(cx, child.propid, child.getter(), child.setter(), child.slot, - child.attrs, child.flags, child.shortid); - } + Shape *newShape = putProperty(cx, shape->propid(), getter, setter, shape->maybeSlot(), + attrs, shape->flags, shape->maybeShortid()); CHECK_SHAPE_CONSISTENCY(this); return newShape; @@ -1005,120 +928,89 @@ JSObject::removeProperty(JSContext *cx, jsid id) if (!shape) return true; - /* First, if shape is unshared and not has a slot, free its slot number. */ - bool addedToFreelist = false; - bool hadSlot = shape->hasSlot(); - if (hadSlot) { - addedToFreelist = freeSlot(cx, shape->slot); - JS_ATOMIC_INCREMENT(&cx->runtime->propertyRemovals); - } - - /* If shape is not the last property added, switch to dictionary mode. */ - if (shape != lastProp && !inDictionaryMode()) { + /* + * If shape is not the last property added, or the last property cannot + * be removed, switch to dictionary mode. + */ + if (!inDictionaryMode() && (shape != lastProperty() || !canRemoveLastProperty())) { if (!toDictionaryMode(cx)) return false; - spp = nativeSearch(cx, shape->propid); + spp = nativeSearch(cx, shape->propid()); shape = SHAPE_FETCH(spp); } + /* + * If in dictionary mode, get a new shape for the last property after the + * removal. We need a fresh shape for all dictionary deletions, even of + * the last property. Otherwise, a shape could replay and caches might + * return deleted DictionaryShapes! See bug 595365. Do this before changing + * the object or table, so the remaining removal is infallible. + */ + Shape *spare = NULL; + if (inDictionaryMode()) { + spare = js_NewGCShape(cx); + if (!spare) + return false; + new (spare) Shape(shape->base(), 0); + } + + /* If shape has a slot, free its slot number. */ + if (shape->hasSlot()) { + freeSlot(cx, shape->slot()); + JS_ATOMIC_INCREMENT(&cx->runtime->propertyRemovals); + } + /* * A dictionary-mode object owns mutable, unique shapes on a non-circular - * doubly linked list, optionally hashed by lastProp->table. So we can edit - * the list and hash in place. + * doubly linked list, hashed by lastProp->table. So we can edit the list + * and hash in place. */ if (inDictionaryMode()) { - PropertyTable *table = lastProp->hasTable() ? lastProp->getTable() : NULL; + PropertyTable &table = lastProperty()->table(); if (SHAPE_HAD_COLLISION(*spp)) { - JS_ASSERT(table); *spp = SHAPE_REMOVED; - ++table->removedCount; - --table->entryCount; + ++table.removedCount; + --table.entryCount; } else { - if (table) { - *spp = NULL; - --table->entryCount; + *spp = NULL; + --table.entryCount; #ifdef DEBUG - /* - * Check the consistency of the table but limit the number of - * checks not to alter significantly the complexity of the - * delete in debug builds, see bug 534493. - */ - const Shape *aprop = lastProp; - for (int n = 50; --n >= 0 && aprop->parent; aprop = aprop->parent) - JS_ASSERT_IF(aprop != shape, nativeContains(cx, *aprop)); + /* + * Check the consistency of the table but limit the number of + * checks not to alter significantly the complexity of the + * delete in debug builds, see bug 534493. 
+ */ + const Shape *aprop = lastProperty(); + for (int n = 50; --n >= 0 && aprop->parent; aprop = aprop->parent) + JS_ASSERT_IF(aprop != shape, nativeContains(cx, *aprop)); #endif - } } - /* - * Remove shape from its non-circular doubly linked list, setting this - * object's OWN_SHAPE flag so the updateShape(cx) further below will - * generate a fresh shape id for this object, distinct from the id of - * any shape in the list. We need a fresh shape for all deletions, even - * of lastProp. Otherwise, a shape number could replay and caches might - * return get deleted DictionaryShapes! See bug 595365. - */ - flags |= OWN_SHAPE; - - Shape *oldLastProp = lastProp; + /* Remove shape from its non-circular doubly linked list. */ + Shape *oldLastProp = lastProperty(); shape->removeFromDictionary(this); - if (table) { - if (shape == oldLastProp) { - JS_ASSERT(shape->getTable() == table); - JS_ASSERT(shape->parent == lastProp); - JS_ASSERT(shape->slotSpan >= lastProp->slotSpan); - JS_ASSERT_IF(hadSlot, shape->slot + 1 <= shape->slotSpan); - /* - * Maintain slot freelist consistency. Slot numbers on the - * freelist are less than lastProp->slotSpan; so if the - * freelist is non-empty, then lastProp->slotSpan may not - * decrease. - */ - if (table->freelist != SHAPE_INVALID_SLOT) { - lastProp->slotSpan = shape->slotSpan; + /* Hand off table from the old to new last property. */ + oldLastProp->handoffTableTo(lastProperty()); - /* - * Add the slot to the freelist if it wasn't added in - * freeSlot and it is not a reserved slot. - */ - if (hadSlot && !addedToFreelist && JSSLOT_FREE(clasp) <= shape->slot) { - setSlot(shape->slot, PrivateUint32Value(table->freelist)); - table->freelist = shape->slot; - } - } - } + /* Generate a new shape for the object, infallibly. */ + JS_ALWAYS_TRUE(generateOwnShape(cx, spare)); - /* Hand off table from old to new lastProp. */ - oldLastProp->setTable(NULL); - lastProp->setTable(table); - } + /* Consider shrinking table if its load factor is <= .25. */ + uint32 size = table.capacity(); + if (size > PropertyTable::MIN_SIZE && table.entryCount <= size >> 2) + (void) table.change(-1, cx); } else { /* * Non-dictionary-mode property tables are shared immutables, so all we - * need do is retract lastProp and we'll either get or else lazily make - * via a later hashify the exact table for the new property lineage. + * need do is retract the last property and we'll either get or else + * lazily make via a later hashify the exact table for the new property + * lineage. */ - JS_ASSERT(shape == lastProp); - removeLastProperty(); - } - updateShape(cx); - - /* On the way out, consider shrinking table if its load factor is <= .25. */ - if (lastProp->hasTable()) { - PropertyTable *table = lastProp->getTable(); - uint32 size = table->capacity(); - if (size > PropertyTable::MIN_SIZE && table->entryCount <= size >> 2) - (void) table->change(-1, cx); - } - - /* Also, consider shrinking object slots if 25% or more are unused. 
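Removal above either nulls the table slot or leaves a SHAPE_REMOVED tombstone when another entry's probe sequence passes through it (the SHAPE_HAD_COLLISION case), and the table is shrunk once the live entry count falls to a quarter of its capacity. Below is a compact sketch of just that bookkeeping, with assumed names and without the actual probing or rehashing:

#include <cassert>
#include <stddef.h>
#include <vector>

struct ProbeTableSketch {
    enum SlotState { EMPTY, REMOVED, LIVE };
    enum { MIN_SIZE = 16 };

    std::vector<SlotState> slots;
    size_t entryCount;
    size_t removedCount;

    explicit ProbeTableSketch(size_t capacity)
      : slots(capacity, EMPTY), entryCount(0), removedCount(0) {}

    size_t capacity() const { return slots.size(); }

    void insertAt(size_t i) {
        assert(slots[i] != LIVE);
        if (slots[i] == REMOVED)
            --removedCount;
        slots[i] = LIVE;
        ++entryCount;
    }

    void removeAt(size_t i, bool onSomeProbePath) {
        assert(slots[i] == LIVE);
        // Leave a tombstone only if another entry's probe sequence crosses
        // this slot; otherwise the slot can go straight back to EMPTY.
        slots[i] = onSomeProbePath ? REMOVED : EMPTY;
        if (onSomeProbePath)
            ++removedCount;
        --entryCount;
    }

    // The shrink rule used above: load factor at or below 25%.
    bool wantsShrink() const {
        return capacity() > size_t(MIN_SIZE) && entryCount <= capacity() / 4;
    }
};

int main() {
    ProbeTableSketch t(32);
    for (size_t i = 0; i < 16; i++)
        t.insertAt(i);
    for (size_t i = 0; i < 9; i++)
        t.removeAt(i, /* onSomeProbePath = */ i % 2 == 0);
    assert(t.entryCount == 7);
    assert(t.wantsShrink());          // 7 live entries in a 32-slot table
    return 0;
}

Tombstones keep lookups correct without rehashing on every delete; the shrink check keeps a mostly-deleted table from staying large forever.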
*/ - if (hasSlotsArray()) { - JS_ASSERT(slotSpan() <= numSlots()); - if ((slotSpan() + (slotSpan() >> 2)) < numSlots()) - shrinkSlots(cx, slotSpan()); + JS_ASSERT(shape == lastProperty()); + removeLastProperty(cx); } CHECK_SHAPE_CONSISTENCY(this); @@ -1128,7 +1020,7 @@ JSObject::removeProperty(JSContext *cx, jsid id) void JSObject::clear(JSContext *cx) { - Shape *shape = lastProp; + Shape *shape = lastProperty(); JS_ASSERT(inDictionaryMode() == shape->inDictionary()); while (shape->parent) { @@ -1138,14 +1030,9 @@ JSObject::clear(JSContext *cx) JS_ASSERT(shape->isEmptyShape()); if (inDictionaryMode()) - shape->listp = &lastProp; + shape->listp = &shape_; - /* - * We have rewound to a uniquely-shaped empty scope, so we don't need an - * override for this object's shape. - */ - clearOwnShape(); - setMap(shape); + JS_ALWAYS_TRUE(setLastProperty(cx, shape)); JS_ATOMIC_INCREMENT(&cx->runtime->propertyRemovals); CHECK_SHAPE_CONSISTENCY(this); @@ -1154,105 +1041,411 @@ JSObject::clear(JSContext *cx) void JSObject::rollbackProperties(JSContext *cx, uint32 slotSpan) { - /* Remove properties from this object until it has a matching slot span. */ - JS_ASSERT(!inDictionaryMode() && !hasSlotsArray() && slotSpan <= this->slotSpan()); + /* + * Remove properties from this object until it has a matching slot span. + * The object cannot have escaped in a way which would prevent safe + * removal of the last properties. + */ + JS_ASSERT(!inDictionaryMode() && slotSpan <= this->slotSpan()); while (this->slotSpan() != slotSpan) { - JS_ASSERT(lastProp->hasSlot() && getSlot(lastProp->slot).isUndefined()); - removeLastProperty(); + JS_ASSERT(lastProperty()->hasSlot() && getSlot(lastProperty()->slot()).isUndefined()); + removeLastProperty(cx); } - updateShape(cx); } -void -JSObject::generateOwnShape(JSContext *cx) +bool +JSObject::generateOwnShape(JSContext *cx, Shape *newShape) { - setOwnShape(js_GenerateShape(cx)); + if (!inDictionaryMode() && !toDictionaryMode(cx)) + return false; + + if (!newShape) { + newShape = js_NewGCShape(cx); + if (!newShape) + return false; + new (newShape) Shape(lastProperty()->base(), 0); + } + + PropertyTable &table = lastProperty()->table(); + Shape **spp = lastProperty()->isEmptyShape() ? 
NULL : table.search(lastProperty()->maybePropid(), false); + + Shape *oldShape = lastProperty(); + newShape->initDictionaryShape(*oldShape, &shape_); + + JS_ASSERT(newShape->parent == oldShape); + oldShape->removeFromDictionary(this); + + oldShape->handoffTableTo(newShape); + + if (spp) + SHAPE_STORE_PRESERVING_COLLISION(spp, newShape); + return true; } -void -JSObject::deletingShapeChange(JSContext *cx, const Shape &shape) -{ - JS_ASSERT(!JSID_IS_VOID(shape.propid)); - generateOwnShape(cx); -} - -const Shape * +Shape * JSObject::methodShapeChange(JSContext *cx, const Shape &shape) { - const Shape *result = &shape; + JS_ASSERT(shape.isMethod()); + + if (!inDictionaryMode() && !toDictionaryMode(cx)) + return NULL; + + Shape *spare = js_NewGCShape(cx); + if (!spare) + return NULL; + new (spare) Shape(shape.base(), 0); - JS_ASSERT(!JSID_IS_VOID(shape.propid)); - if (shape.isMethod()) { #ifdef DEBUG - const Value &prev = nativeGetSlot(shape.slot); - JS_ASSERT(shape.methodObject() == prev.toObject()); - JS_ASSERT(canHaveMethodBarrier()); - JS_ASSERT(hasMethodBarrier()); - JS_ASSERT(!shape.setter()); + JS_ASSERT(canHaveMethodBarrier()); + JS_ASSERT(!shape.setter()); + JS_ASSERT(!shape.hasShortID()); #endif - /* - * Pass null to make a stub getter, but pass along shape.setter() to - * preserve watchpoints. Clear Shape::METHOD from flags as we are - * despecializing from a method memoized in the property tree to a - * plain old function-valued property. - */ - result = putProperty(cx, shape.propid, NULL, shape.setter(), shape.slot, - shape.attrs, - shape.getFlags() & ~Shape::METHOD, - shape.shortid); - if (!result) - return NULL; - } + /* + * Clear Shape::METHOD from flags as we are despecializing from a + * method memoized in the property tree to a plain old function-valued + * property. 
+ */ + Shape *result = + putProperty(cx, shape.propid(), NULL, NULL, shape.slot(), + shape.attrs, + shape.getFlags() & ~Shape::METHOD, + 0); + if (!result) + return NULL; - if (branded()) { - uintN thrashCount = getMethodThrashCount(); - if (thrashCount < JSObject::METHOD_THRASH_COUNT_MAX) { - ++thrashCount; - setMethodThrashCount(thrashCount); - if (thrashCount == JSObject::METHOD_THRASH_COUNT_MAX) { - unbrand(cx); - return result; - } - } - } + if (result != lastProperty()) + JS_ALWAYS_TRUE(generateOwnShape(cx, spare)); - generateOwnShape(cx); return result; } bool -JSObject::methodShapeChange(JSContext *cx, uint32 slot) -{ - if (!hasMethodBarrier()) { - generateOwnShape(cx); - } else { - for (Shape::Range r = lastProp->all(); !r.empty(); r.popFront()) { - const Shape &shape = r.front(); - JS_ASSERT(!JSID_IS_VOID(shape.propid)); - if (shape.slot == slot) - return methodShapeChange(cx, shape) != NULL; - } - } - return true; -} - -void -JSObject::protoShapeChange(JSContext *cx) -{ - generateOwnShape(cx); -} - -void JSObject::shadowingShapeChange(JSContext *cx, const Shape &shape) { - JS_ASSERT(!JSID_IS_VOID(shape.propid)); - generateOwnShape(cx); + return generateOwnShape(cx); } bool -JSObject::globalObjectOwnShapeChange(JSContext *cx) +JSObject::clearParent(JSContext *cx) { - generateOwnShape(cx); - return !js_IsPropertyCacheDisabled(cx); + return setParent(cx, NULL); +} + +bool +JSObject::setParent(JSContext *cx, JSObject *parent) +{ + if (parent && !parent->setDelegate(cx)) + return false; + + if (inDictionaryMode()) { + lastProperty()->base()->setParent(parent); + return true; + } + + return Shape::setObjectParent(cx, parent, getProto(), &shape_); +} + +/* static */ bool +Shape::setObjectParent(JSContext *cx, JSObject *parent, JSObject *proto, HeapPtrShape *listp) +{ + if ((*listp)->getObjectParent() == parent) + return true; + + BaseShape base(*(*listp)->base()->unowned()); + base.setParent(parent); + + return replaceLastProperty(cx, base, proto, listp); +} + +bool +JSObject::preventExtensions(JSContext *cx, js::AutoIdVector *props) +{ + JS_ASSERT(isExtensible()); + + if (props) { + if (js::FixOp fix = getOps()->fix) { + bool success; + if (!fix(cx, this, &success, props)) + return false; + if (!success) { + JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_CANT_CHANGE_EXTENSIBILITY); + return false; + } + } else { + if (!js::GetPropertyNames(cx, this, JSITER_HIDDEN | JSITER_OWNONLY, props)) + return false; + } + } + + return setFlag(cx, BaseShape::NOT_EXTENSIBLE, GENERATE_SHAPE); +} + +bool +JSObject::setFlag(JSContext *cx, /*BaseShape::Flag*/ uint32 flag_, GenerateShape generateShape) +{ + BaseShape::Flag flag = (BaseShape::Flag) flag_; + + if (lastProperty()->getObjectFlags() & flag) + return true; + + if (inDictionaryMode()) { + if (generateShape == GENERATE_SHAPE && !generateOwnShape(cx)) + return false; + lastProperty()->base()->setObjectFlag(flag); + return true; + } + + return Shape::setObjectFlag(cx, flag, getProto(), &shape_); +} + +/* static */ bool +Shape::setObjectFlag(JSContext *cx, BaseShape::Flag flag, JSObject *proto, HeapPtrShape *listp) +{ + if ((*listp)->getObjectFlags() & flag) + return true; + + BaseShape base(*(*listp)->base()->unowned()); + base.flags |= flag; + + return replaceLastProperty(cx, base, proto, listp); +} + +/* static */ inline HashNumber +BaseShapeEntry::hash(const js::BaseShape *base) +{ + JS_ASSERT(!base->isOwned()); + + JSDHashNumber hash = base->flags; + hash = JS_ROTATE_LEFT32(hash, 4) ^ (jsuword(base->clasp) >> 3); + hash = 
JS_ROTATE_LEFT32(hash, 4) ^ (jsuword(base->parent.get()) >> 3);
+    if (base->rawGetter)
+        hash = JS_ROTATE_LEFT32(hash, 4) ^ jsuword(base->rawGetter);
+    if (base->rawSetter)
+        hash = JS_ROTATE_LEFT32(hash, 4) ^ jsuword(base->rawSetter);
+    return hash;
+}
+
+/* static */ inline bool
+BaseShapeEntry::match(UnownedBaseShape *key, const BaseShape *lookup)
+{
+    JS_ASSERT(!lookup->isOwned());
+
+    return key->flags == lookup->flags
+        && key->clasp == lookup->clasp
+        && key->parent == lookup->parent
+        && key->getterObj == lookup->getterObj
+        && key->setterObj == lookup->setterObj;
+}
+
+/* static */ UnownedBaseShape *
+BaseShape::getUnowned(JSContext *cx, const BaseShape &base)
+{
+    BaseShapeSet &table = cx->compartment->baseShapes;
+
+    if (!table.initialized() && !table.init())
+        return NULL;
+
+    BaseShapeSet::AddPtr p = table.lookupForAdd(&base);
+
+    if (p) {
+        UnownedBaseShape *base = *p;
+
+        if (cx->compartment->needsBarrier())
+            BaseShape::readBarrier(base);
+
+        return base;
+    }
+
+    BaseShape *nbase_ = js_NewGCBaseShape(cx);
+    if (!nbase_)
+        return NULL;
+    new (nbase_) BaseShape(base);
+
+    UnownedBaseShape *nbase = static_cast<UnownedBaseShape *>(nbase_);
+
+    if (!table.relookupOrAdd(p, &base, nbase))
+        return NULL;
+
+    return nbase;
+}
+
+void
+JSCompartment::sweepBaseShapeTable(JSContext *cx)
+{
+    if (baseShapes.initialized()) {
+        for (BaseShapeSet::Enum e(baseShapes); !e.empty(); e.popFront()) {
+            UnownedBaseShape *base = e.front();
+            if (!base->isMarked())
+                e.removeFront();
+        }
+    }
+}
+
+void
+BaseShape::finalize(JSContext *cx, bool background)
+{
+    if (table_) {
+        cx->delete_(table_);
+        table_ = NULL;
+    }
+}
+
+/* static */ bool
+Shape::setExtensibleParents(JSContext *cx, HeapPtrShape *listp)
+{
+    Shape *shape = *listp;
+    JS_ASSERT(!shape->inDictionary());
+
+    BaseShape base(*shape->base()->unowned());
+    base.flags |= BaseShape::EXTENSIBLE_PARENTS;
+
+    /* This is only used for Block and Call objects, which have a NULL proto. */
+    return replaceLastProperty(cx, base, NULL, listp);
+}
+
+bool
+Bindings::setExtensibleParents(JSContext *cx)
+{
+    if (!ensureShape(cx))
+        return false;
+    return Shape::setExtensibleParents(cx, &lastBinding);
+}
+
+bool
+Bindings::setParent(JSContext *cx, JSObject *obj)
+{
+    if (!ensureShape(cx))
+        return false;
+
+    /* This is only used for Block objects, which have a NULL proto.
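BaseShape::getUnowned() above is a hash-consing lookup: equal base-shape descriptors within a compartment are collapsed onto a single canonical unowned BaseShape, so later checks such as the matchesGetterSetter() test in addPropertyInternal() reduce to pointer comparisons. Here is a standalone illustration of the same interning idea using only the standard library; the descriptor's fields are placeholders, not the engine's.

#include <cassert>
#include <functional>
#include <set>

struct BaseDesc {
    const void *clasp;    // stands in for the object's class
    const void *parent;   // stands in for the object's parent
    unsigned flags;

    bool operator<(const BaseDesc &other) const {
        std::less<const void *> lt;   // total order over unrelated pointers
        if (clasp != other.clasp)   return lt(clasp, other.clasp);
        if (parent != other.parent) return lt(parent, other.parent);
        return flags < other.flags;
    }
};

class BaseDescTable {
    // std::set never relocates its elements, so handing out their
    // addresses as canonical identities is safe.
    std::set<BaseDesc> canon;
  public:
    const BaseDesc *getCanonical(const BaseDesc &lookup) {
        return &*canon.insert(lookup).first;
    }
};

int main() {
    BaseDescTable table;
    int someClass, someParent;
    BaseDesc a = { &someClass, &someParent, 0x10 };
    BaseDesc b = { &someClass, &someParent, 0x10 };
    BaseDesc c = { &someClass, &someParent, 0x20 };

    const BaseDesc *pa = table.getCanonical(a);
    const BaseDesc *pb = table.getCanonical(b);
    const BaseDesc *pc = table.getCanonical(c);
    assert(pa == pb);   // equal descriptors share one canonical object
    assert(pa != pc);   // a different flag set gets its own entry
    return 0;
}

The sweep step in sweepBaseShapeTable() then only has to drop canonical entries that were not marked by the GC, exactly as any interning table for immutable values would.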
*/ + return Shape::setObjectParent(cx, obj, NULL, &lastBinding); +} + +/* static */ inline HashNumber +InitialShapeEntry::hash(const Lookup &lookup) +{ + JSDHashNumber hash = jsuword(lookup.clasp) >> 3; + hash = JS_ROTATE_LEFT32(hash, 4) ^ (jsuword(lookup.proto) >> 3); + hash = JS_ROTATE_LEFT32(hash, 4) ^ (jsuword(lookup.parent) >> 3); + return hash + lookup.nfixed; +} + +/* static */ inline bool +InitialShapeEntry::match(const InitialShapeEntry &key, const Lookup &lookup) +{ + return lookup.clasp == key.shape->getObjectClass() + && lookup.proto == key.proto + && lookup.parent == key.shape->getObjectParent() + && lookup.nfixed == key.shape->numFixedSlots() + && lookup.baseFlags == key.shape->getObjectFlags(); +} + +/* static */ Shape * +EmptyShape::getInitialShape(JSContext *cx, Class *clasp, JSObject *proto, JSObject *parent, + AllocKind kind, uint32 objectFlags) +{ + InitialShapeSet &table = cx->compartment->initialShapes; + + if (!table.initialized() && !table.init()) + return NULL; + + size_t nfixed = GetGCKindSlots(kind, clasp); + InitialShapeEntry::Lookup lookup(clasp, proto, parent, nfixed, objectFlags); + + InitialShapeSet::AddPtr p = table.lookupForAdd(lookup); + + if (p) { + Shape *shape = p->shape; + + if (cx->compartment->needsBarrier()) + Shape::readBarrier(shape); + + return shape; + } + + BaseShape base(clasp, parent, objectFlags); + BaseShape *nbase = BaseShape::getUnowned(cx, base); + if (!nbase) + return NULL; + + Shape *shape = JS_PROPERTY_TREE(cx).newShape(cx); + if (!shape) + return NULL; + new (shape) EmptyShape(nbase, nfixed); + + InitialShapeEntry entry; + entry.shape = shape; + entry.proto = proto; + + if (!table.relookupOrAdd(p, lookup, entry)) + return NULL; + + return shape; +} + +void +NewObjectCache::invalidateEntriesForShape(JSContext *cx, Shape *shape, JSObject *proto) +{ + Class *clasp = shape->getObjectClass(); + + gc::AllocKind kind = gc::GetGCObjectKind(shape->numFixedSlots()); + if (CanBeFinalizedInBackground(kind, clasp)) + kind = GetBackgroundAllocKind(kind); + + GlobalObject *global = shape->getObjectParent()->getGlobal(); + types::TypeObject *type = proto->getNewType(cx); + + EntryIndex entry; + if (lookupGlobal(clasp, global, kind, &entry)) + PodZero(&entries[entry]); + if (!proto->isGlobal() && lookupProto(clasp, proto, kind, &entry)) + PodZero(&entries[entry]); + if (lookupType(clasp, type, kind, &entry)) + PodZero(&entries[entry]); +} + +/* static */ void +EmptyShape::insertInitialShape(JSContext *cx, Shape *shape, JSObject *proto) +{ + InitialShapeEntry::Lookup lookup(shape->getObjectClass(), proto, shape->getObjectParent(), + shape->numFixedSlots(), shape->getObjectFlags()); + + InitialShapeSet::Ptr p = cx->compartment->initialShapes.lookup(lookup); + JS_ASSERT(p); + + InitialShapeEntry &entry = const_cast(*p); + JS_ASSERT(entry.shape->isEmptyShape()); + + /* The new shape had better be rooted at the old one. */ +#ifdef DEBUG + const Shape *nshape = shape; + while (!nshape->isEmptyShape()) + nshape = nshape->previous(); + JS_ASSERT(nshape == entry.shape); +#endif + + entry.shape = shape; + + /* + * This affects the shape that will be produced by the various NewObject + * methods, so clear any cache entry referring to the old shape. This is + * not required for correctness (though it may bust on the above asserts): + * the NewObject must always check for a nativeEmpty() result and generate + * the appropriate properties if found. Clearing the cache entry avoids + * this duplicate regeneration. 
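Both BaseShapeEntry::hash() and InitialShapeEntry::hash() above mix several pointers and a small integer by rotating the running hash before each xor and discarding the pointers' low alignment bits. A self-contained sketch of that mixing style follows; the key layout and rotation constant are illustrative only.

#include <cassert>
#include <stddef.h>
#include <stdint.h>

static inline uint32_t rotl32(uint32_t v, unsigned n) {
    return (v << n) | (v >> (32 - n));    // n must be in 1..31
}

struct InitialKey {
    const void *clasp;
    const void *proto;
    const void *parent;
    uint32_t nfixed;
};

static uint32_t hashKey(const InitialKey &k) {
    // Shift off the low bits that are always zero for aligned allocations,
    // rotating between fields so their bits land in different positions.
    uint32_t h = uint32_t(reinterpret_cast<uintptr_t>(k.clasp) >> 3);
    h = rotl32(h, 4) ^ uint32_t(reinterpret_cast<uintptr_t>(k.proto) >> 3);
    h = rotl32(h, 4) ^ uint32_t(reinterpret_cast<uintptr_t>(k.parent) >> 3);
    return h + k.nfixed;
}

int main() {
    int cls, proto;
    InitialKey a = { &cls, &proto, NULL, 4 };
    InitialKey b = { &cls, &proto, NULL, 4 };
    InitialKey c = { &cls, &proto, NULL, 8 };
    assert(hashKey(a) == hashKey(b));   // equal keys must hash equally
    (void) hashKey(c);                  // different keys usually differ
    return 0;
}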
+ */ + cx->compartment->newObjectCache.invalidateEntriesForShape(cx, shape, proto); +} + +void +JSCompartment::sweepInitialShapeTable(JSContext *cx) +{ + if (initialShapes.initialized()) { + for (InitialShapeSet::Enum e(initialShapes); !e.empty(); e.popFront()) { + const InitialShapeEntry &entry = e.front(); + if (!entry.shape->isMarked() || (entry.proto && !entry.proto->isMarked())) + e.removeFront(); + } + } } diff --git a/js/src/jsscope.h b/js/src/jsscope.h index b3c8b4e2bf90..e0f15fc0b0c9 100644 --- a/js/src/jsscope.h +++ b/js/src/jsscope.h @@ -51,7 +51,6 @@ #include "jstypes.h" #include "jscntxt.h" -#include "jscompartment.h" #include "jsobj.h" #include "jsprvtd.h" #include "jspubtd.h" @@ -129,12 +128,6 @@ * a harsh mistress, and we now convert the scope to a "dictionary" on first * delete, to avoid O(n^2) growth in the property tree. * - * What about thread safety? If the property tree operations done by requests - * are find-node and insert-node, then the only hazard is duplicate insertion. - * This is harmless except for minor bloat. When all requests have ended or - * been suspended, the GC is free to sweep the tree after marking all nodes - * reachable from scopes, performing remove-node operations as needed. - * * Is the property tree worth it compared to property storage in each table's * entries? To decide, we must find the relation <> between the words used * with a property tree and the words required without a tree. @@ -181,8 +174,7 @@ * lists if at most one property Y follows X in all scopes. In or near such a * case, we waste a word on the right-sibling link outside of the root ply of * the tree. Note also that the root ply tends to be large, so O(n^2) growth - * searching it is likely, indicating the need for hashing (but with increased - * thread safety costs). + * searching it is likely, indicating the need for hashing. * * If only K out of N nodes in the property tree have more than one child, we * could eliminate the sibling link and overlay a children list or hash-table @@ -205,22 +197,23 @@ * scope->table isn't worth it. So instead of always allocating scope->table, * we leave it null while initializing all the other scope members as if it * were non-null and minimal-length. Until a scope is searched - * MAX_LINEAR_SEARCHES times, we use linear search from obj->lastProp to find a + * LINEAR_SEARCHES_MAX times, we use linear search from obj->lastProp to find a * given id, and save on the time and space overhead of creating a hash table. * Also, we don't create tables for property tree Shapes that have shape * lineages smaller than MIN_ENTRIES. */ -#define SHAPE_INVALID_SLOT 0xffffffff - namespace js { +/* Limit on the number of slotful properties in an object. */ +static const uint32 SHAPE_INVALID_SLOT = JS_BIT(24) - 1; +static const uint32 SHAPE_MAXIMUM_SLOT = JS_BIT(24) - 2; + /* * Shapes use multiplicative hashing, _a la_ jsdhash.[ch], but specialized to * minimize footprint. */ struct PropertyTable { - static const uint32 MAX_LINEAR_SEARCHES = 7; static const uint32 MIN_ENTRIES = 7; static const uint32 MIN_SIZE_LOG2 = 4; static const uint32 MIN_SIZE = JS_BIT(MIN_SIZE_LOG2); @@ -293,17 +286,211 @@ namespace js { class PropertyTree; -static inline PropertyOp -CastAsPropertyOp(js::Class *clasp) -{ - return JS_DATA_TO_FUNC_PTR(PropertyOp, clasp); -} - /* * Reuse the API-only JSPROP_INDEX attribute to mean shadowability. */ #define JSPROP_SHADOWABLE JSPROP_INDEX +/* + * Shapes encode information about both a property lineage *and* a particular + * property. 
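
JSCompartment::sweepBaseShapeTable and sweepInitialShapeTable above both walk a weak table after marking and drop entries whose referents were not marked. A minimal sketch of that sweep loop over a standard container (Thing and IsMarked are stand-ins assumed for illustration, not the engine's GC API):

#include <unordered_set>

struct Thing { bool marked; };

// Assumed stand-in for the collector's mark query.
static bool IsMarked(const Thing *t) { return t->marked; }

// Drop every entry whose referent was not marked during this GC.
static void SweepTable(std::unordered_set<Thing *> &table) {
    for (auto it = table.begin(); it != table.end(); ) {
        if (!IsMarked(*it))
            it = table.erase(it);   // erase returns the next valid iterator
        else
            ++it;
    }
}
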
This information is split across the Shape and the BaseShape + * at shape->base(). Both Shape and BaseShape can be either owned or unowned + * by, respectively, the Object or Shape referring to them. + * + * Owned Shapes are used in dictionary objects, and form a doubly linked list + * whose entries are all owned by that dictionary. Unowned Shapes are all in + * the property tree. + * + * Owned BaseShapes are used for shapes which have property tables, including + * the last properties in all dictionaries. Unowned BaseShapes compactly store + * information common to many shapes. In a given compartment there is a single + * BaseShape for each combination of BaseShape information. This information + * is cloned in owned BaseShapes so that information can be quickly looked up + * for a given object or shape without regard to whether the base shape is + * owned or not. + * + * All combinations of owned/unowned Shapes/BaseShapes are possible: + * + * Owned Shape, Owned BaseShape: + * + * Last property in a dictionary object. The BaseShape is transferred from + * property to property as the object's last property changes. + * + * Owned Shape, Unowned BaseShape: + * + * Property in a dictionary object other than the last one. + * + * Unowned Shape, Owned BaseShape: + * + * Property in the property tree which has a property table. + * + * Unowned Shape, Unowned BaseShape: + * + * Property in the property tree which does not have a property table. + * + * BaseShapes additionally encode some information about the referring object + * itself. This includes the object's class, parent and various flags that may + * be set for the object. Except for the class, this information is mutable and + * may change when the object has an established property lineage. On such + * changes the entire property lineage is not updated, but rather only the + * last property (and its base shape). This works because only the object's + * last property is used to query information about the object. Care must be + * taken to call JSObject::canRemoveLastProperty when unwinding an object to + * an earlier property, however. + */ + +class UnownedBaseShape; + +class BaseShape : public js::gc::Cell +{ + public: + friend struct Shape; + friend struct BaseShapeEntry; + + enum Flag { + /* Owned by the referring shape. */ + OWNED_SHAPE = 0x1, + + /* getterObj/setterObj are active in unions below. */ + HAS_GETTER_OBJECT = 0x2, + HAS_SETTER_OBJECT = 0x4, + + /* + * Flags set which describe the referring object. Once set these cannot + * be unset, and are transferred from shape to shape as the object's + * last property changes. + */ + + EXTENSIBLE_PARENTS = 0x8, + DELEGATE = 0x10, + SYSTEM = 0x20, + NOT_EXTENSIBLE = 0x40, + INDEXED = 0x80, + BOUND_FUNCTION = 0x100, + VAROBJ = 0x200, + WATCHED = 0x400, + ITERATED_SINGLETON = 0x800, + NEW_TYPE_UNKNOWN = 0x1000, + UNCACHEABLE_PROTO = 0x2000, + + OBJECT_FLAG_MASK = 0x3ff8 + }; + + private: + Class *clasp; /* Class of referring object. */ + HeapPtrObject parent; /* Parent of referring object. */ + uint32 flags; /* Vector of above flags. */ + uint32 slotSpan_; /* Object slot span for BaseShapes at + * dictionary last properties. 
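
The Flag enum above reserves the low three bits for the ownership and getter/setter-object bits and packs the object-describing flags into bits 3..13, with OBJECT_FLAG_MASK = 0x3ff8 expected to cover exactly that second group. A small compile-time check of the relationship could look like the following sketch (the enumerators are re-declared locally with trailing underscores; the engine itself would use JS_STATIC_ASSERT rather than C++11 static_assert):

// The object-describing flags from the enum above, re-declared for the check.
enum {
    EXTENSIBLE_PARENTS_ = 0x8,    DELEGATE_ = 0x10,     SYSTEM_ = 0x20,
    NOT_EXTENSIBLE_     = 0x40,   INDEXED_  = 0x80,     BOUND_FUNCTION_ = 0x100,
    VAROBJ_             = 0x200,  WATCHED_  = 0x400,    ITERATED_SINGLETON_ = 0x800,
    NEW_TYPE_UNKNOWN_   = 0x1000, UNCACHEABLE_PROTO_    = 0x2000
};

// 0x8 | 0x10 | ... | 0x2000 == 0x3ff8: the mask covers exactly the object flags
// and excludes OWNED_SHAPE, HAS_GETTER_OBJECT and HAS_SETTER_OBJECT.
static_assert((EXTENSIBLE_PARENTS_ | DELEGATE_ | SYSTEM_ | NOT_EXTENSIBLE_ | INDEXED_ |
               BOUND_FUNCTION_ | VAROBJ_ | WATCHED_ | ITERATED_SINGLETON_ |
               NEW_TYPE_UNKNOWN_ | UNCACHEABLE_PROTO_) == 0x3ff8,
              "OBJECT_FLAG_MASK must equal the union of the object flags");
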
*/ + + union { + js::PropertyOp rawGetter; /* getter hook for shape */ + JSObject *getterObj; /* user-defined callable "get" object or + null if shape->hasGetterValue() */ + }; + + union { + js::StrictPropertyOp rawSetter; /* setter hook for shape */ + JSObject *setterObj; /* user-defined callable "set" object or + null if shape->hasSetterValue() */ + }; + + /* For owned BaseShapes, the canonical unowned BaseShape. */ + HeapPtr unowned_; + + /* For owned BaseShapes, the shape's property table. */ + PropertyTable *table_; + + public: + void finalize(JSContext *cx, bool background); + + inline BaseShape(Class *clasp, JSObject *parent, uint32 objectFlags); + inline BaseShape(Class *clasp, JSObject *parent, uint32 objectFlags, + uint8 attrs, PropertyOp rawGetter, StrictPropertyOp rawSetter); + + bool isOwned() const { return !!(flags & OWNED_SHAPE); } + + inline bool matchesGetterSetter(PropertyOp rawGetter, + StrictPropertyOp rawSetter) const; + + inline void adoptUnowned(UnownedBaseShape *other); + inline void setOwned(UnownedBaseShape *unowned); + + inline void setParent(JSObject *obj); + JSObject *getObjectParent() { return parent; } + + void setObjectFlag(Flag flag) { JS_ASSERT(!(flag & ~OBJECT_FLAG_MASK)); flags |= flag; } + + bool hasGetterObject() const { return !!(flags & HAS_GETTER_OBJECT); } + JSObject *getterObject() const { JS_ASSERT(hasGetterObject()); return getterObj; } + + bool hasSetterObject() const { return !!(flags & HAS_SETTER_OBJECT); } + JSObject *setterObject() const { JS_ASSERT(hasSetterObject()); return setterObj; } + + bool hasTable() const { JS_ASSERT_IF(table_, isOwned()); return table_ != NULL; } + PropertyTable &table() const { JS_ASSERT(table_ && isOwned()); return *table_; } + void setTable(PropertyTable *table) { JS_ASSERT(isOwned()); table_ = table; } + + uint32 slotSpan() const { JS_ASSERT(isOwned()); return slotSpan_; } + void setSlotSpan(uint32 slotSpan) { JS_ASSERT(isOwned()); slotSpan_ = slotSpan; } + + /* Lookup base shapes from the compartment's baseShapes table. */ + static UnownedBaseShape *getUnowned(JSContext *cx, const BaseShape &base); + + /* Get the canonical base shape. */ + inline UnownedBaseShape *unowned(); + + /* Get the canonical base shape for an owned one. */ + inline UnownedBaseShape *baseUnowned(); + + /* Get the canonical base shape for an unowned one (i.e. identity). */ + inline UnownedBaseShape *toUnowned(); + + /* For JIT usage */ + static inline size_t offsetOfClass() { return offsetof(BaseShape, clasp); } + static inline size_t offsetOfParent() { return offsetof(BaseShape, parent); } + static inline size_t offsetOfFlags() { return offsetof(BaseShape, flags); } + + static inline void writeBarrierPre(BaseShape *shape); + static inline void writeBarrierPost(BaseShape *shape, void *addr); + static inline void readBarrier(BaseShape *shape); + + private: + static void staticAsserts() { + JS_STATIC_ASSERT(offsetof(BaseShape, clasp) == offsetof(js::shadow::BaseShape, clasp)); + } +}; + +class UnownedBaseShape : public BaseShape {}; + +UnownedBaseShape * +BaseShape::unowned() +{ + return isOwned() ? baseUnowned() : toUnowned(); +} + +UnownedBaseShape * +BaseShape::toUnowned() +{ + JS_ASSERT(!isOwned() && !unowned_); return static_cast(this); +} + +UnownedBaseShape * +BaseShape::baseUnowned() +{ + JS_ASSERT(isOwned() && unowned_); return unowned_; +} + +/* Entries for the per-compartment baseShapes set of unowned base shapes. 
*/ +struct BaseShapeEntry +{ + typedef const BaseShape *Lookup; + + static inline HashNumber hash(const BaseShape *base); + static inline bool match(UnownedBaseShape *key, const BaseShape *lookup); +}; +typedef HashSet BaseShapeSet; + struct Shape : public js::gc::Cell { friend struct ::JSObject; @@ -312,155 +499,98 @@ struct Shape : public js::gc::Cell friend class js::Bindings; friend bool IsShapeAboutToBeFinalized(JSContext *cx, const js::Shape *shape); - mutable uint32 shapeid; /* shape identifier */ - uint32 slotSpan; /* one more than maximum live slot number */ - - /* - * numLinearSearches starts at zero and is incremented initially on each - * search() call. Once numLinearSearches reaches MAX_LINEAR_SEARCHES - * (which is a small integer), the table is created on the next search() - * call, and the table pointer will be easily distinguishable from a small - * integer. The table can also be created when hashifying for dictionary - * mode. - */ - union { - mutable size_t numLinearSearches; - mutable js::PropertyTable *table; - }; - - inline void freeTable(JSContext *cx); - - HeapId propid; - protected: - union { - PropertyOp rawGetter; /* getter and setter hooks or objects */ - JSObject *getterObj; /* user-defined callable "get" object or - null if shape->hasGetterValue(); or - joined function object if METHOD flag - is set. */ - js::Class *clasp; /* prototype class for empty scope */ - }; + HeapPtrBaseShape base_; + HeapId propid_; - union { - StrictPropertyOp rawSetter; /* getter is JSObject* and setter is 0 - if shape->isMethod() */ - JSObject *setterObj; /* user-defined callable "set" object or - null if shape->hasSetterValue() */ - }; + JS_ENUM_HEADER(SlotInfo, uint32) + { + /* Number of fixed slots in objects with this shape. */ + FIXED_SLOTS_MAX = 0x1f, + FIXED_SLOTS_SHIFT = 27, + FIXED_SLOTS_MASK = FIXED_SLOTS_MAX << FIXED_SLOTS_SHIFT, - public: - uint32 slot; /* abstract index in object slots */ - private: + /* + * numLinearSearches starts at zero and is incremented initially on + * search() calls. Once numLinearSearches reaches LINEAR_SEARCHES_MAX, + * the table is created on the next search() call. The table can also + * be created when hashifying for dictionary mode. + */ + LINEAR_SEARCHES_MAX = 0x7, + LINEAR_SEARCHES_SHIFT = 24, + LINEAR_SEARCHES_MASK = LINEAR_SEARCHES_MAX << LINEAR_SEARCHES_SHIFT, + + /* + * Mask to get the index in object slots for shapes which hasSlot(). + * For !hasSlot() shapes in the property tree with a parent, stores the + * parent's slot index (which may be invalid), and invalid for all + * other shapes. + */ + SLOT_MASK = JS_BIT(24) - 1 + } JS_ENUM_FOOTER(SlotInfo); + + uint32 slotInfo; /* mask of above info */ uint8 attrs; /* attributes, see jsapi.h JSPROP_* */ - mutable uint8 flags; /* flags, see below for defines */ - public: - int16 shortid; /* tinyid, or local arg/var index */ + uint8 flags; /* flags, see below for defines */ + int16 shortid_; /* tinyid, or local arg/var index */ - protected: - mutable HeapPtrShape parent; /* parent node, reverse for..in order */ + HeapPtrShape parent; /* parent node, reverse for..in order */ /* kids is valid when !inDictionary(), listp is valid when inDictionary(). 
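
The SlotInfo constants introduced above pack three fields into the 32-bit slotInfo word: the slot index in bits 0..23, the linear-search counter in bits 24..26, and the fixed-slot count in bits 27..31. A self-contained sketch of that packing, with assumed helper names, is:

#include <cassert>
#include <cstdint>

// Mirror of the SlotInfo layout described above (names suffixed to mark them as local).
static const uint32_t SLOT_MASK_             = (1u << 24) - 1;   // bits 0..23
static const uint32_t LINEAR_SEARCHES_SHIFT_ = 24;               // bits 24..26
static const uint32_t FIXED_SLOTS_SHIFT_     = 27;               // bits 27..31

static uint32_t packSlotInfo(uint32_t slot, uint32_t searches, uint32_t nfixed) {
    assert(slot <= SLOT_MASK_ && searches <= 0x7 && nfixed <= 0x1f);
    return slot | (searches << LINEAR_SEARCHES_SHIFT_) | (nfixed << FIXED_SLOTS_SHIFT_);
}

static uint32_t slotOf(uint32_t info)     { return info & SLOT_MASK_; }
static uint32_t searchesOf(uint32_t info) { return (info >> LINEAR_SEARCHES_SHIFT_) & 0x7; }
static uint32_t nfixedOf(uint32_t info)   { return info >> FIXED_SLOTS_SHIFT_; }

// e.g. packSlotInfo(5, 2, 4) stores slot 5, 2 linear searches and 4 fixed slots,
// and each accessor recovers its field untouched because the bit ranges are disjoint.
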
*/ union { - mutable js::KidsPointer kids; /* null, single child, or a tagged ptr - to many-kids data structure */ - mutable HeapPtrShape *listp; /* dictionary list starting at lastProp - has a double-indirect back pointer, - either to shape->parent if not last, - else to obj->lastProp */ + KidsPointer kids; /* null, single child, or a tagged ptr + to many-kids data structure */ + HeapPtrShape *listp; /* dictionary list starting at lastProp + has a double-indirect back pointer, + either to shape->parent if not last, + else to obj->lastProp */ }; - static inline js::Shape **search(JSContext *cx, HeapPtr *startp, jsid id, - bool adding = false); - static js::Shape *newDictionaryShape(JSContext *cx, const js::Shape &child, - HeapPtr *listp); - static js::Shape *newDictionaryList(JSContext *cx, HeapPtr *listp); + static inline Shape **search(JSContext *cx, HeapPtrShape *pstart, jsid id, + bool adding = false); + static js::Shape *newDictionaryList(JSContext *cx, HeapPtrShape *listp); - inline void removeFromDictionary(JSObject *obj) const; - inline void insertIntoDictionary(HeapPtr *dictp); + inline void removeFromDictionary(JSObject *obj); + inline void insertIntoDictionary(HeapPtrShape *dictp); - js::Shape *getChild(JSContext *cx, const js::Shape &child, HeapPtr *listp); + inline void initDictionaryShape(const Shape &child, HeapPtrShape *dictp); + + Shape *getChildBinding(JSContext *cx, const Shape &child, HeapPtrShape *lastBinding); + + /* Replace the base shape of the last shape in a non-dictionary lineage with base. */ + static bool replaceLastProperty(JSContext *cx, const BaseShape &base, JSObject *proto, HeapPtrShape *lastp); bool hashify(JSContext *cx); + void handoffTableTo(Shape *newShape); - void setTable(js::PropertyTable *t) const { - JS_ASSERT_IF(t && t->freelist != SHAPE_INVALID_SLOT, t->freelist < slotSpan); - table = t; + inline void setParent(js::Shape *p); + + bool ensureOwnBaseShape(JSContext *cx) { + if (base()->isOwned()) + return true; + return makeOwnBaseShape(cx); } - /* - * Setter for parent. The challenge is to maintain JSObjectMap::slotSpan in - * the face of arbitrary slot order. - * - * By induction, an empty shape has a slotSpan member correctly computed as - * JSCLASS_FREE(clasp) -- see EmptyShape's constructor in jsscopeinlines.h. - * This is the basis case, where p is null. - * - * Any child shape, whether in a shape tree or in a dictionary list, must - * have a slotSpan either one greater than its slot value (if the child's - * slot is SHAPE_INVALID_SLOT, this will yield 0; the static assertion - * below enforces this), or equal to its parent p's slotSpan, whichever is - * greater. This is the inductive step. - * - * If we maintained shape paths such that parent slot was always one less - * than child slot, possibly with an exception for SHAPE_INVALID_SLOT slot - * values where we would use another way of computing slotSpan based on the - * PropertyTable (as JSC does), then we would not need to store slotSpan in - * Shape (to be precise, in its base struct, JSobjectMap). - * - * But we currently scramble slots along shape paths due to resolve-based - * creation of shapes mapping reserved slots, and we do not have the needed - * PropertyTable machinery to use as an alternative when parent slot is not - * one less than child slot. This machinery is neither simple nor free, as - * it must involve creating a table for any slot-less transition and then - * pinning the table to its shape. 
- * - * Use of 'delete' can scramble slots along the shape lineage too, although - * it always switches the target object to dictionary mode, so the cost of - * a pinned table is less onerous. - * - * Note that allocating a uint32 slotSpan member in JSObjectMap takes no - * net extra space on 64-bit targets (it packs with shape). And on 32-bit - * targets, adding slotSpan to JSObjectMap takes no gross extra space, - * because Shape rounds up to an even number of 32-bit words (required for - * GC-thing and js::Value allocation in any event) on 32-bit targets. - * - * So in terms of space, we can afford to maintain both slotSpan and slot, - * but it might be better if we eliminated slotSpan using slot combined - * with an auxiliary mechanism based on table. - */ - void setParent(js::Shape *p) { - JS_STATIC_ASSERT(uint32(SHAPE_INVALID_SLOT) == ~uint32(0)); - if (p) - slotSpan = JS_MAX(p->slotSpan, slot + 1); - JS_ASSERT(slotSpan < JSObject::NSLOTS_LIMIT); - parent = p; - } + bool makeOwnBaseShape(JSContext *cx); public: - static JS_FRIEND_DATA(Shape) sharedNonNative; + bool hasTable() const { return base()->hasTable(); } + js::PropertyTable &table() const { return base()->table(); } - bool hasTable() const { - /* A valid pointer should be much bigger than MAX_LINEAR_SEARCHES. */ - return numLinearSearches > PropertyTable::MAX_LINEAR_SEARCHES; + size_t sizeOfPropertyTable(JSMallocSizeOfFun mallocSizeOf) const { + return hasTable() ? table().sizeOfIncludingThis(mallocSizeOf) : 0; } - js::PropertyTable *getTable() const { - JS_ASSERT(hasTable()); - return table; - } - - size_t sizeOfPropertyTableIncludingThis(JSMallocSizeOfFun mallocSizeOf) const { - return hasTable() ? getTable()->sizeOfIncludingThis(mallocSizeOf) : 0; - } - - size_t sizeOfKidsIncludingThis(JSMallocSizeOfFun mallocSizeOf) const { + size_t sizeOfKids(JSMallocSizeOfFun mallocSizeOf) const { JS_ASSERT(!inDictionary()); return kids.isHash() ? kids.toHash()->sizeOfIncludingThis(mallocSizeOf) : 0; } - bool isNative() const { return this != &sharedNonNative; } + bool isNative() const { + JS_ASSERT(!(flags & NON_NATIVE) == getObjectClass()->isNative()); + return !(flags & NON_NATIVE); + } const js::Shape *previous() const { return parent; @@ -477,8 +607,7 @@ struct Shape : public js::gc::Cell Range(const Shape *shape) : cursor(shape) { } bool empty() const { - JS_ASSERT_IF(!cursor->parent, JSID_IS_EMPTY(cursor->propid)); - return !cursor->parent; + return cursor->isEmptyShape(); } const Shape &front() const { @@ -496,6 +625,18 @@ struct Shape : public js::gc::Cell return Range(this); } + Class *getObjectClass() const { return base()->clasp; } + JSObject *getObjectParent() const { return base()->parent; } + + static bool setObjectParent(JSContext *cx, JSObject *obj, JSObject *proto, HeapPtrShape *listp); + static bool setObjectFlag(JSContext *cx, BaseShape::Flag flag, JSObject *proto, HeapPtrShape *listp); + + uint32 getObjectFlags() const { return base()->flags & BaseShape::OBJECT_FLAG_MASK; } + bool hasObjectFlag(BaseShape::Flag flag) const { + JS_ASSERT(!(flag & ~BaseShape::OBJECT_FLAG_MASK)); + return !!(base()->flags & flag); + } + protected: /* * Implementation-private bits stored in shape->flags. See public: enum {} @@ -503,31 +644,33 @@ struct Shape : public js::gc::Cell * with these bits. */ enum { - SHARED_EMPTY = 0x01, + /* Property is placeholder for a non-native class. */ + NON_NATIVE = 0x01, /* Property stored in per-object dictionary, not shared property tree. 
*/ IN_DICTIONARY = 0x02, - /* Prevent unwanted mutation of shared Bindings::lastBinding nodes. */ - FROZEN = 0x04, - - UNUSED_BITS = 0x38 + UNUSED_BITS = 0x3C }; - Shape(jsid id, PropertyOp getter, StrictPropertyOp setter, uint32 slot, uintN attrs, - uintN flags, intN shortid, uint32 shape = INVALID_SHAPE, uint32 slotSpan = 0); + Shape(BaseShape *base, jsid id, uint32 slot, uint32 nfixed, uintN attrs, uintN flags, intN shortid); + + /* Get a shape identical to this one, without parent/kids information. */ + Shape(const Shape *other); /* Used by EmptyShape (see jsscopeinlines.h). */ - Shape(JSCompartment *comp, Class *aclasp); + Shape(BaseShape *base, uint32 nfixed); - /* Used by sharedNonNative. */ - explicit Shape(uint32 shape); + /* Copy constructor disabled, to avoid misuse of the above form. */ + Shape(const Shape &other); - protected: - bool frozen() const { return (flags & FROZEN) != 0; } - void setFrozen() { flags |= FROZEN; } - - bool isEmptyShape() const { JS_ASSERT_IF(!parent, JSID_IS_EMPTY(propid)); return !parent; } + /* + * Whether this shape has a valid slot value. This may be true even if + * !hasSlot() (see SlotInfo comment above), and may be false even if + * hasSlot() if the shape is being constructed and has not had a slot + * assigned yet. After construction, hasSlot() implies !hasMissingSlot(). + */ + bool hasMissingSlot() const { return maybeSlot() == SHAPE_INVALID_SLOT; } public: /* Public bits stored in shape->flags. */ @@ -540,52 +683,136 @@ struct Shape : public js::gc::Cell bool inDictionary() const { return (flags & IN_DICTIONARY) != 0; } uintN getFlags() const { return flags & PUBLIC_FLAGS; } bool hasShortID() const { return (flags & HAS_SHORTID) != 0; } - bool isMethod() const { return (flags & METHOD) != 0; } - JSObject &methodObject() const { JS_ASSERT(isMethod()); return *getterObj; } + /* + * A shape has a method barrier when some compiler-created "null closure" + * function objects (functions that do not use lexical bindings above their + * scope, only free variable names) that have a correct JSSLOT_PARENT value + * thanks to the COMPILE_N_GO optimization are stored in objects without + * cloning. + * + * The de-facto standard JS language requires each evaluation of such a + * closure to result in a unique (according to === and observable effects) + * function object. When storing a function to a property, we use method + * shapes to speculate that these effects will never be observed: the + * property will only be used in calls, and f.callee will not be used + * to get a handle on the object. + * + * If either a non-call use or callee access occurs, then the function is + * cloned and the object is reshaped with a non-method property. + * + * Note that method shapes do not imply the object has a particular + * uncloned function, just that the object has *some* uncloned function + * in the shape's slot. 
+ */ + bool isMethod() const { + JS_ASSERT_IF(flags & METHOD, !base()->rawGetter); + return (flags & METHOD) != 0; + } - PropertyOp getter() const { return rawGetter; } - bool hasDefaultGetter() const { return !rawGetter; } - PropertyOp getterOp() const { JS_ASSERT(!hasGetterValue()); return rawGetter; } - JSObject *getterObject() const { JS_ASSERT(hasGetterValue()); return getterObj; } + PropertyOp getter() const { return base()->rawGetter; } + bool hasDefaultGetterOrIsMethod() const { return !base()->rawGetter; } + bool hasDefaultGetter() const { return !base()->rawGetter && !isMethod(); } + PropertyOp getterOp() const { JS_ASSERT(!hasGetterValue()); return base()->rawGetter; } + JSObject *getterObject() const { JS_ASSERT(hasGetterValue()); return base()->getterObj; } // Per ES5, decode null getterObj as the undefined value, which encodes as null. Value getterValue() const { JS_ASSERT(hasGetterValue()); - return getterObj ? js::ObjectValue(*getterObj) : js::UndefinedValue(); + return base()->getterObj ? js::ObjectValue(*base()->getterObj) : js::UndefinedValue(); } Value getterOrUndefined() const { - return hasGetterValue() && getterObj ? js::ObjectValue(*getterObj) : js::UndefinedValue(); + return (hasGetterValue() && base()->getterObj) + ? ObjectValue(*base()->getterObj) + : UndefinedValue(); } - StrictPropertyOp setter() const { return rawSetter; } - bool hasDefaultSetter() const { return !rawSetter; } - StrictPropertyOp setterOp() const { JS_ASSERT(!hasSetterValue()); return rawSetter; } - JSObject *setterObject() const { JS_ASSERT(hasSetterValue()); return setterObj; } + StrictPropertyOp setter() const { return base()->rawSetter; } + bool hasDefaultSetter() const { return !base()->rawSetter; } + StrictPropertyOp setterOp() const { JS_ASSERT(!hasSetterValue()); return base()->rawSetter; } + JSObject *setterObject() const { JS_ASSERT(hasSetterValue()); return base()->setterObj; } // Per ES5, decode null setterObj as the undefined value, which encodes as null. Value setterValue() const { JS_ASSERT(hasSetterValue()); - return setterObj ? js::ObjectValue(*setterObj) : js::UndefinedValue(); + return base()->setterObj ? js::ObjectValue(*base()->setterObj) : js::UndefinedValue(); } Value setterOrUndefined() const { - return hasSetterValue() && setterObj ? js::ObjectValue(*setterObj) : js::UndefinedValue(); + return (hasSetterValue() && base()->setterObj) + ? ObjectValue(*base()->setterObj) + : UndefinedValue(); } void update(js::PropertyOp getter, js::StrictPropertyOp setter, uint8 attrs); inline JSDHashNumber hash() const; inline bool matches(const js::Shape *p) const; - inline bool matchesParamsAfterId(PropertyOp agetter, StrictPropertyOp asetter, + inline bool matchesParamsAfterId(BaseShape *base, uint32 aslot, uintN aattrs, uintN aflags, intN ashortid) const; bool get(JSContext* cx, JSObject *receiver, JSObject *obj, JSObject *pobj, js::Value* vp) const; bool set(JSContext* cx, JSObject *obj, bool strict, js::Value* vp) const; + BaseShape *base() const { return base_; } + bool hasSlot() const { return (attrs & JSPROP_SHARED) == 0; } + uint32 slot() const { JS_ASSERT(hasSlot() && !hasMissingSlot()); return maybeSlot(); } + uint32 maybeSlot() const { return slotInfo & SLOT_MASK; } + + bool isEmptyShape() const { + JS_ASSERT_IF(JSID_IS_EMPTY(propid_), hasMissingSlot()); + return JSID_IS_EMPTY(propid_); + } + + uint32 slotSpan() const { + JS_ASSERT(!inDictionary()); + uint32 free = JSSLOT_FREE(getObjectClass()); + return hasMissingSlot() ? 
free : Max(free, maybeSlot() + 1); + } + + void setSlot(uint32 slot) { + JS_ASSERT(slot <= SHAPE_INVALID_SLOT); + slotInfo = slotInfo & ~SLOT_MASK; + slotInfo = slotInfo | slot; + } + + uint32 numFixedSlots() const { + return (slotInfo >> FIXED_SLOTS_SHIFT); + } + + void setNumFixedSlots(uint32 nfixed) { + JS_ASSERT(nfixed < FIXED_SLOTS_MAX); + slotInfo = slotInfo & ~FIXED_SLOTS_MASK; + slotInfo = slotInfo | (nfixed << FIXED_SLOTS_SHIFT); + } + + uint32 numLinearSearches() const { + return (slotInfo & LINEAR_SEARCHES_MASK) >> LINEAR_SEARCHES_SHIFT; + } + + void incrementNumLinearSearches() { + uint32 count = numLinearSearches(); + JS_ASSERT(count < LINEAR_SEARCHES_MAX); + slotInfo = slotInfo & ~LINEAR_SEARCHES_MASK; + slotInfo = slotInfo | ((count + 1) << LINEAR_SEARCHES_SHIFT); + } + + jsid propid() const { JS_ASSERT(!isEmptyShape()); return maybePropid(); } + jsid maybePropid() const { JS_ASSERT(!JSID_IS_VOID(propid_)); return propid_; } + + int16 shortid() const { JS_ASSERT(hasShortID()); return maybeShortid(); } + int16 maybeShortid() const { return shortid_; } + + /* + * If SHORTID is set in shape->flags, we use shape->shortid rather + * than id when calling shape's getter or setter. + */ + jsid getUserId() const { + return hasShortID() ? INT_TO_JSID(shortid()) : propid(); + } uint8 attributes() const { return attrs; } bool configurable() const { return (attrs & JSPROP_PERMANENT) == 0; } @@ -597,10 +824,6 @@ struct Shape : public js::gc::Cell bool hasGetterValue() const { return attrs & JSPROP_GETTER; } bool hasSetterValue() const { return attrs & JSPROP_SETTER; } - bool hasDefaultGetterOrIsMethod() const { - return hasDefaultGetter() || isMethod(); - } - bool isDataDescriptor() const { return (attrs & (JSPROP_SETTER | JSPROP_GETTER)) == 0; } @@ -619,9 +842,55 @@ struct Shape : public js::gc::Cell return hasSlot() || (attrs & JSPROP_SHADOWABLE); } + /* + * Sometimes call objects and run-time block objects need unique shapes, but + * sometimes they don't. + * + * Property cache entries only record the shapes of the first and last + * objects along the search path, so if the search traverses more than those + * two objects, then those first and last shapes must determine the shapes + * of everything else along the path. The js_PurgeScopeChain stuff takes + * care of making this work, but that suffices only because we require that + * start points with the same shape have the same successor object in the + * search path --- a cache hit means the starting shapes were equal, which + * means the seach path tail (everything but the first object in the path) + * was shared, which in turn means the effects of a purge will be seen by + * all affected starting search points. + * + * For call and run-time block objects, the "successor object" is the scope + * chain parent. Unlike prototype objects (of which there are usually few), + * scope chain parents are created frequently (possibly on every call), so + * following the shape-implies-parent rule blindly would lead one to give + * every call and block its own shape. + * + * In many cases, however, it's not actually necessary to give call and + * block objects their own shapes, and we can do better. If the code will + * always be used with the same global object, and none of the enclosing + * call objects could have bindings added to them at runtime (by direct eval + * calls or function statements), then we can use a fixed set of shapes for + * those objects. 
You could think of the shapes in the functions' bindings + * and compile-time blocks as uniquely identifying the global object(s) at + * the end of the scope chain. + * + * (In fact, some JSScripts we do use against multiple global objects (see + * bug 618497), and using the fixed shapes isn't sound there.) + * + * In deciding whether a call or block has any extensible parents, we + * actually only need to consider enclosing calls; blocks are never + * extensible, and the other sorts of objects that appear in the scope + * chains ('with' blocks, say) are not CacheableNonGlobalScopes. + * + * If the hasExtensibleParents flag is set for the last property in a + * script's bindings or a compiler-generated Block object, then created + * Call or Block objects need unique shapes. If the flag is clear, then we + * can use lastBinding's shape. + */ + static bool setExtensibleParents(JSContext *cx, HeapPtrShape *listp); + bool extensibleParents() const { return !!(base()->flags & BaseShape::EXTENSIBLE_PARENTS); } + uint32 entryCount() const { if (hasTable()) - return getTable()->entryCount; + return table().entryCount; const js::Shape *shape = this; uint32 count = 0; @@ -647,54 +916,86 @@ struct Shape : public js::gc::Cell void dumpSubtree(JSContext *cx, int level, FILE *fp) const; #endif - void finalize(JSContext *cx); + void finalize(JSContext *cx, bool background); void removeChild(js::Shape *child); - void freezeIfDictionary() { - if (inDictionary()) { - Shape *shape = this; - do { - JS_ASSERT(!shape->frozen()); - shape->setFrozen(); - } while ((shape = shape->parent) != NULL); - } - } - - inline static void writeBarrierPre(const js::Shape *shape); - inline static void writeBarrierPost(const js::Shape *shape, void *addr); + static inline void writeBarrierPre(const Shape *shape); + static inline void writeBarrierPost(const Shape *shape, void *addr); /* * All weak references need a read barrier for incremental GC. This getter * method implements the read barrier. It's used to obtain initial shapes * from the compartment. */ - inline static void readBarrier(const js::Shape *shape); + static inline void readBarrier(const Shape *shape); + + /* For JIT usage */ + static inline size_t offsetOfBase() { return offsetof(Shape, base_); } + + private: + static void staticAsserts() { + JS_STATIC_ASSERT(offsetof(Shape, base_) == offsetof(js::shadow::Shape, base)); + JS_STATIC_ASSERT(offsetof(Shape, slotInfo) == offsetof(js::shadow::Shape, slotInfo)); + JS_STATIC_ASSERT(FIXED_SLOTS_SHIFT == js::shadow::Shape::FIXED_SLOTS_SHIFT); + } }; struct EmptyShape : public js::Shape { - EmptyShape(JSCompartment *comp, js::Class *aclasp); + EmptyShape(BaseShape *base, uint32 nfixed); - js::Class *getClass() const { return clasp; }; + /* + * Lookup an initial shape matching the given parameters, creating an empty + * shape if none was found. 
+ */ + static Shape *getInitialShape(JSContext *cx, Class *clasp, JSObject *proto, + JSObject *parent, gc::AllocKind kind, uint32 objectFlags = 0); - static EmptyShape *create(JSContext *cx, js::Class *clasp) { - js::Shape *eprop = JS_PROPERTY_TREE(cx).newShape(cx); - if (!eprop) - return NULL; - return new (eprop) EmptyShape(cx->compartment, clasp); - } - - static inline EmptyShape *ensure(JSContext *cx, js::Class *clasp, - ReadBarriered *shapep); - - static inline EmptyShape *getEmptyArgumentsShape(JSContext *cx); - static inline EmptyShape *getEmptyBlockShape(JSContext *cx); - static inline EmptyShape *getEmptyCallShape(JSContext *cx); - static inline EmptyShape *getEmptyDeclEnvShape(JSContext *cx); - static inline EmptyShape *getEmptyEnumeratorShape(JSContext *cx); - static inline EmptyShape *getEmptyWithShape(JSContext *cx); + /* + * Reinsert an alternate initial shape, to be returned by future + * getInitialShape calls, until the new shape becomes unreachable in a GC + * and the table entry is purged. + */ + static void insertInitialShape(JSContext *cx, Shape *shape, JSObject *proto); }; +/* + * Entries for the per-compartment initialShapes set indexing initial shapes + * for objects in the compartment and the associated types. + */ +struct InitialShapeEntry +{ + /* + * Initial shape to give to the object. This is an empty shape, except for + * certain classes (e.g. String, RegExp) which may add certain baked-in + * properties. + */ + js::Shape *shape; + + /* + * Matching prototype for the entry. The shape of an object determines its + * prototype, but the prototype cannot be determined from the shape itself. + */ + JSObject *proto; + + /* State used to determine a match on an initial shape. */ + struct Lookup { + Class *clasp; + JSObject *proto; + JSObject *parent; + uint32 nfixed; + uint32 baseFlags; + Lookup(Class *clasp, JSObject *proto, JSObject *parent, uint32 nfixed, uint32 baseFlags) + : clasp(clasp), proto(proto), parent(parent), + nfixed(nfixed), baseFlags(baseFlags) + {} + }; + + static inline HashNumber hash(const Lookup &lookup); + static inline bool match(const InitialShapeEntry &key, const Lookup &lookup); +}; +typedef HashSet InitialShapeSet; + } /* namespace js */ /* js::Shape pointer tag bit indicating a collision. */ @@ -716,20 +1017,6 @@ struct EmptyShape : public js::Shape #define SHAPE_STORE_PRESERVING_COLLISION(spp, shape) \ (*(spp) = (js::Shape *) (jsuword(shape) | SHAPE_HAD_COLLISION(*(spp)))) -/* - * If SHORTID is set in shape->flags, we use shape->shortid rather - * than id when calling shape's getter or setter. - */ -#define SHAPE_USERID(shape) \ - ((shape)->hasShortID() ? INT_TO_JSID((shape)->shortid) \ - : (shape)->propid) - -extern uint32 -js_GenerateShape(JSRuntime *rt); - -extern uint32 -js_GenerateShape(JSContext *cx); - namespace js { /* @@ -743,37 +1030,36 @@ namespace js { * There is one failure case: we return &emptyShape->parent, where * |emptyShape| is the EmptyShape at the start of the shape lineage. 
*/ -JS_ALWAYS_INLINE js::Shape ** -Shape::search(JSContext *cx, HeapPtr *startp, jsid id, bool adding) +JS_ALWAYS_INLINE Shape ** +Shape::search(JSContext *cx, HeapPtrShape *pstart, jsid id, bool adding) { - js::Shape *start = *startp; + Shape *start = *pstart; if (start->hasTable()) - return start->getTable()->search(id, adding); + return start->table().search(id, adding); - if (start->numLinearSearches == PropertyTable::MAX_LINEAR_SEARCHES) { + if (start->numLinearSearches() == LINEAR_SEARCHES_MAX) { if (start->isBigEnoughForAPropertyTable() && start->hashify(cx)) - return start->getTable()->search(id, adding); + return start->table().search(id, adding); /* * No table built -- there weren't enough entries, or OOM occurred. * Don't increment numLinearSearches, to keep hasTable() false. */ JS_ASSERT(!start->hasTable()); } else { - JS_ASSERT(start->numLinearSearches < PropertyTable::MAX_LINEAR_SEARCHES); - start->numLinearSearches++; + start->incrementNumLinearSearches(); } /* * Not enough searches done so far to justify hashing: search linearly - * from *startp. + * from start. * * We don't use a Range here, or stop at null parent (the empty shape at * the end). This avoids an extra load per iteration at the cost (if the * search fails) of an extra load and id test at the end. */ - HeapPtr *spp; - for (spp = startp; js::Shape *shape = *spp; spp = &shape->parent) { - if (shape->propid.get() == id) + HeapPtrShape *spp; + for (spp = pstart; js::Shape *shape = *spp; spp = &shape->parent) { + if (shape->maybePropid() == id) return spp->unsafeGet(); } return spp->unsafeGet(); @@ -786,4 +1072,33 @@ Shape::search(JSContext *cx, HeapPtr *startp, jsid id, bool adding) #pragma warning(pop) #endif +inline js::Class * +JSObject::getClass() const +{ + return lastProperty()->getObjectClass(); +} + +inline JSClass * +JSObject::getJSClass() const +{ + return Jsvalify(getClass()); +} + +inline bool +JSObject::hasClass(const js::Class *c) const +{ + return getClass() == c; +} + +inline const js::ObjectOps * +JSObject::getOps() const +{ + return &getClass()->ops; +} + +namespace JS { + template<> class AnchorPermitted { }; + template<> class AnchorPermitted { }; +} + #endif /* jsscope_h___ */ diff --git a/js/src/jsscopeinlines.h b/js/src/jsscopeinlines.h index 3545b0f46ea9..b3aae3eafeea 100644 --- a/js/src/jsscopeinlines.h +++ b/js/src/jsscopeinlines.h @@ -58,220 +58,178 @@ #include "jsgcinlines.h" #include "jsobjinlines.h" -inline void -js::Shape::freeTable(JSContext *cx) +inline bool +JSObject::maybeSetIndexed(JSContext *cx, jsid id) { - if (hasTable()) { - cx->delete_(getTable()); - setTable(NULL); + jsuint index; + if (js_IdIsIndex(id, &index)) { + if (!setIndexed(cx)) + return false; } -} - -inline js::EmptyShape * -js::types::TypeObject::getEmptyShape(JSContext *cx, js::Class *aclasp, - gc::AllocKind kind) -{ - JS_ASSERT(!singleton); - - /* - * Empty shapes can only be on the default 'new' type for a prototype. - * Objects with a common prototype use the same shape lineage, even if - * their prototypes differ. - */ - JS_ASSERT(this == proto->newType); - - JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST); - int i = kind - js::gc::FINALIZE_OBJECT0; - - if (!emptyShapes) { - emptyShapes = (js::HeapPtr*) - cx->calloc_(sizeof(js::HeapPtr) * js::gc::FINALIZE_OBJECT_LIMIT); - if (!emptyShapes) - return NULL; - - /* - * Always fill in emptyShapes[0], so canProvideEmptyShape works. - * Other empty shapes are filled in lazily. 
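
Shape::search above walks the lineage linearly, bumping the per-shape counter on each probe, and only builds a property table once LINEAR_SEARCHES_MAX probes have been spent and the lineage is big enough. A toy version of that count-then-hashify policy over a simple parent-linked list (Node, Lineage and hashify are invented for illustration) might read:

#include <string>
#include <unordered_map>

struct Node {
    std::string id;
    Node *parent;                       // older property, as in the shape lineage
};

struct Lineage {
    enum { LINEAR_SEARCHES_MAX = 7 };

    Node *last = nullptr;               // newest property
    unsigned linearSearches = 0;
    bool hasTable = false;
    std::unordered_map<std::string, Node *> table;   // built lazily

    Node *search(const std::string &id) {
        if (!hasTable) {
            if (linearSearches < LINEAR_SEARCHES_MAX) {
                ++linearSearches;
                for (Node *n = last; n; n = n->parent) {
                    if (n->id == id)
                        return n;
                }
                return nullptr;
            }
            hashify();                  // enough probes spent: build the table once
        }
        auto it = table.find(id);
        return it == table.end() ? nullptr : it->second;
    }

    void hashify() {
        for (Node *n = last; n; n = n->parent)
            table.emplace(n->id, n);    // newest-first, so shadowed ids keep the newest node
        hasTable = true;
    }
};

The real code additionally requires isBigEnoughForAPropertyTable() and tolerates hashify() failing under OOM, in which case it simply stays on the linear path, as the comment in Shape::search above notes.
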
- */ - emptyShapes[0].init(js::EmptyShape::create(cx, aclasp)); - if (!emptyShapes[0]) { - cx->free_(emptyShapes); - emptyShapes = NULL; - return NULL; - } - } - - JS_ASSERT(aclasp == emptyShapes[0]->getClass()); - - if (!emptyShapes[i]) { - emptyShapes[i].init(js::EmptyShape::create(cx, aclasp)); - if (!emptyShapes[i]) - return NULL; - } - - return emptyShapes[i]; + return true; } inline bool -js::types::TypeObject::canProvideEmptyShape(js::Class *aclasp) -{ - return proto && !singleton && (!emptyShapes || emptyShapes[0]->getClass() == aclasp); -} - -inline void -JSObject::updateShape(JSContext *cx) -{ - JS_ASSERT(isNative()); - if (hasOwnShape()) - setOwnShape(js_GenerateShape(cx)); - else - objShape = lastProp->shapeid; -} - -inline void -JSObject::updateFlags(const js::Shape *shape, bool isDefinitelyAtom) -{ - jsuint index; - if (!isDefinitelyAtom && js_IdIsIndex(shape->propid, &index)) - setIndexed(); - - if (shape->isMethod()) - setMethodBarrier(); -} - -inline void JSObject::extend(JSContext *cx, const js::Shape *shape, bool isDefinitelyAtom) { - setLastProperty(shape); - updateFlags(shape, isDefinitelyAtom); - updateShape(cx); + if (!isDefinitelyAtom && !maybeSetIndexed(cx, shape->propid())) + return false; + if (!setLastProperty(cx, shape)) + return false; + return true; } namespace js { -inline bool -StringObject::init(JSContext *cx, JSString *str) +inline +BaseShape::BaseShape(Class *clasp, JSObject *parent, uint32 objectFlags) { - JS_ASSERT(nativeEmpty()); - - const Shape *shape = cx->compartment->initialStringShape; - if (shape) { - setLastProperty(shape); - } else { - shape = assignInitialShape(cx); - if (!shape) - return false; - cx->compartment->initialStringShape = shape; - } - JS_ASSERT(shape == lastProperty()); - JS_ASSERT(!nativeEmpty()); - JS_ASSERT(nativeLookup(cx, ATOM_TO_JSID(cx->runtime->atomState.lengthAtom))->slot == LENGTH_SLOT); - - setStringThis(str); - return true; + JS_ASSERT(!(objectFlags & ~OBJECT_FLAG_MASK)); + PodZero(this); + this->clasp = clasp; + this->parent = parent; + this->flags = objectFlags; } inline -Shape::Shape(jsid propid, PropertyOp getter, StrictPropertyOp setter, uint32 slot, - uintN attrs, uintN flags, intN shortid, uint32 shapeid, uint32 slotSpan) - : shapeid(shapeid), - slotSpan(slotSpan), - numLinearSearches(0), - propid(propid), - rawGetter(getter), - rawSetter(setter), - slot(slot), +BaseShape::BaseShape(Class *clasp, JSObject *parent, uint32 objectFlags, + uint8 attrs, js::PropertyOp rawGetter, js::StrictPropertyOp rawSetter) +{ + JS_ASSERT(!(objectFlags & ~OBJECT_FLAG_MASK)); + PodZero(this); + this->clasp = clasp; + this->parent = parent; + this->flags = objectFlags; + this->rawGetter = rawGetter; + this->rawSetter = rawSetter; + if ((attrs & JSPROP_GETTER) && rawGetter) + flags |= HAS_GETTER_OBJECT; + if ((attrs & JSPROP_SETTER) && rawSetter) + flags |= HAS_SETTER_OBJECT; +} + +inline bool +BaseShape::matchesGetterSetter(PropertyOp rawGetter, StrictPropertyOp rawSetter) const +{ + return rawGetter == this->rawGetter && rawSetter == this->rawSetter; +} + +inline void +BaseShape::setParent(JSObject *obj) +{ + parent = obj; +} + +inline void +BaseShape::adoptUnowned(UnownedBaseShape *other) +{ + /* + * This is a base shape owned by a dictionary object, update it to reflect the + * unowned base shape of a new last property. 
+ */ + JS_ASSERT(isOwned()); + + JSObject *parent = this->parent; + uint32 flags = (this->flags & OBJECT_FLAG_MASK); + + uint32 span = slotSpan(); + PropertyTable *table = &this->table(); + + *this = *other; + setOwned(other); + this->parent = parent; + this->flags |= flags; + setTable(table); + setSlotSpan(span); +} + +inline void +BaseShape::setOwned(UnownedBaseShape *unowned) +{ + flags |= OWNED_SHAPE; + this->unowned_ = unowned; +} + +inline +Shape::Shape(BaseShape *base, jsid propid, uint32 slot, uint32 nfixed, + uintN attrs, uintN flags, intN shortid) + : base_(base), + propid_(propid), + slotInfo(slot | (nfixed << FIXED_SLOTS_SHIFT)), attrs(uint8(attrs)), flags(uint8(flags)), - shortid(int16(shortid)), + shortid_(int16(shortid)), parent(NULL) { - JS_ASSERT_IF(slotSpan != SHAPE_INVALID_SLOT, slotSpan < JSObject::NSLOTS_LIMIT); - JS_ASSERT_IF(getter && (attrs & JSPROP_GETTER), getterObj->isCallable()); - JS_ASSERT_IF(setter && (attrs & JSPROP_SETTER), setterObj->isCallable()); + JS_ASSERT(base); + JS_ASSERT(!JSID_IS_VOID(propid)); + JS_ASSERT_IF(isMethod(), !base->rawGetter); kids.setNull(); } inline -Shape::Shape(JSCompartment *comp, Class *aclasp) - : shapeid(js_GenerateShape(comp->rt)), - slotSpan(JSSLOT_FREE(aclasp)), - numLinearSearches(0), - propid(JSID_EMPTY), - clasp(aclasp), - rawSetter(NULL), - slot(SHAPE_INVALID_SLOT), - attrs(0), - flags(SHARED_EMPTY), - shortid(0), +Shape::Shape(const Shape *other) + : base_(other->base()->unowned()), + propid_(other->maybePropid()), + slotInfo(other->slotInfo & ~LINEAR_SEARCHES_MASK), + attrs(other->attrs), + flags(other->flags), + shortid_(other->maybeShortid()), parent(NULL) { kids.setNull(); } inline -Shape::Shape(uint32 shapeid) - : shapeid(shapeid), - slotSpan(0), - numLinearSearches(0), - propid(JSID_EMPTY), - clasp(NULL), - rawSetter(NULL), - slot(SHAPE_INVALID_SLOT), - attrs(0), - flags(SHARED_EMPTY), - shortid(0), +Shape::Shape(BaseShape *base, uint32 nfixed) + : base_(base), + propid_(JSID_EMPTY), + slotInfo(SHAPE_INVALID_SLOT | (nfixed << FIXED_SLOTS_SHIFT)), + attrs(JSPROP_SHARED), + flags(0), + shortid_(0), parent(NULL) { + JS_ASSERT(base); kids.setNull(); } inline JSDHashNumber Shape::hash() const { - JSDHashNumber hash = 0; + JSDHashNumber hash = jsuword(base()->unowned()); /* Accumulate from least to most random so the low bits are most random. 
*/ - JS_ASSERT_IF(isMethod(), !rawSetter); - if (getter()) - hash = JS_ROTATE_LEFT32(hash, 4) ^ jsuword(getter()); - if (setter()) - hash = JS_ROTATE_LEFT32(hash, 4) ^ jsuword(setter()); hash = JS_ROTATE_LEFT32(hash, 4) ^ (flags & PUBLIC_FLAGS); hash = JS_ROTATE_LEFT32(hash, 4) ^ attrs; - hash = JS_ROTATE_LEFT32(hash, 4) ^ shortid; - hash = JS_ROTATE_LEFT32(hash, 4) ^ slot; - hash = JS_ROTATE_LEFT32(hash, 4) ^ JSID_BITS(propid.get()); + hash = JS_ROTATE_LEFT32(hash, 4) ^ shortid_; + hash = JS_ROTATE_LEFT32(hash, 4) ^ maybeSlot(); + hash = JS_ROTATE_LEFT32(hash, 4) ^ JSID_BITS(propid_.get()); return hash; } inline bool Shape::matches(const js::Shape *other) const { - JS_ASSERT(!JSID_IS_VOID(propid)); - JS_ASSERT(!JSID_IS_VOID(other->propid)); - return propid.get() == other->propid.get() && - matchesParamsAfterId(other->getter(), other->setter(), other->slot, other->attrs, - other->flags, other->shortid); + return propid_.get() == other->propid_.get() && + matchesParamsAfterId(other->base(), other->maybeSlot(), other->attrs, + other->flags, other->shortid_); } inline bool -Shape::matchesParamsAfterId(PropertyOp agetter, StrictPropertyOp asetter, uint32 aslot, +Shape::matchesParamsAfterId(BaseShape *base, uint32 aslot, uintN aattrs, uintN aflags, intN ashortid) const { - JS_ASSERT(!JSID_IS_VOID(propid)); - return getter() == agetter && - setter() == asetter && - slot == aslot && + return base->unowned() == this->base()->unowned() && + maybeSlot() == aslot && attrs == aattrs && ((flags ^ aflags) & PUBLIC_FLAGS) == 0 && - shortid == ashortid; + shortid_ == ashortid; } inline bool Shape::get(JSContext* cx, JSObject *receiver, JSObject* obj, JSObject *pobj, js::Value* vp) const { - JS_ASSERT(!JSID_IS_VOID(propid)); JS_ASSERT(!hasDefaultGetter()); if (hasGetterValue()) { @@ -281,7 +239,7 @@ Shape::get(JSContext* cx, JSObject *receiver, JSObject* obj, JSObject *pobj, js: } if (isMethod()) { - vp->setObject(methodObject()); + vp->setObject(*pobj->nativeGetMethod(this)); return pobj->methodReadBarrier(cx, *this, vp); } @@ -291,7 +249,7 @@ Shape::get(JSContext* cx, JSObject *receiver, JSObject* obj, JSObject *pobj, js: */ if (obj->isWith()) obj = js_UnwrapWithObject(cx, obj); - return js::CallJSPropertyOp(cx, getterOp(), receiver, SHAPE_USERID(this), vp); + return js::CallJSPropertyOp(cx, getterOp(), receiver, getUserId(), vp); } inline bool @@ -310,22 +268,28 @@ Shape::set(JSContext* cx, JSObject* obj, bool strict, js::Value* vp) const /* See the comment in js::Shape::get as to why we check for With. 
*/ if (obj->isWith()) obj = js_UnwrapWithObject(cx, obj); - return js::CallJSPropertyOpSetter(cx, setterOp(), obj, SHAPE_USERID(this), strict, vp); + return js::CallJSPropertyOpSetter(cx, setterOp(), obj, getUserId(), strict, vp); } inline void -Shape::removeFromDictionary(JSObject *obj) const +Shape::setParent(js::Shape *p) +{ + JS_ASSERT_IF(p && !p->hasMissingSlot() && !inDictionary(), + p->maybeSlot() <= maybeSlot()); + JS_ASSERT_IF(p && !inDictionary(), + hasSlot() == (p->maybeSlot() != maybeSlot())); + parent = p; +} + +inline void +Shape::removeFromDictionary(JSObject *obj) { - JS_ASSERT(!frozen()); JS_ASSERT(inDictionary()); JS_ASSERT(obj->inDictionaryMode()); JS_ASSERT(listp); - JS_ASSERT(!JSID_IS_VOID(propid)); - JS_ASSERT(obj->lastProp->inDictionary()); - JS_ASSERT(obj->lastProp->listp == &obj->lastProp); - JS_ASSERT_IF(obj->lastProp != this, !JSID_IS_VOID(obj->lastProp->propid)); - JS_ASSERT_IF(obj->lastProp->parent, !JSID_IS_VOID(obj->lastProp->parent->propid)); + JS_ASSERT(obj->shape_->inDictionary()); + JS_ASSERT(obj->shape_->listp == &obj->shape_); if (parent) parent->listp = listp; @@ -342,12 +306,9 @@ Shape::insertIntoDictionary(HeapPtr *dictp) */ JS_ASSERT(inDictionary()); JS_ASSERT(!listp); - JS_ASSERT(!JSID_IS_VOID(propid)); - JS_ASSERT_IF(*dictp, !(*dictp)->frozen()); JS_ASSERT_IF(*dictp, (*dictp)->inDictionary()); JS_ASSERT_IF(*dictp, (*dictp)->listp == dictp); - JS_ASSERT_IF(*dictp, !JSID_IS_VOID((*dictp)->propid)); JS_ASSERT_IF(*dictp, compartment() == (*dictp)->compartment()); setParent(*dictp); @@ -357,64 +318,33 @@ Shape::insertIntoDictionary(HeapPtr *dictp) *dictp = this; } +void +Shape::initDictionaryShape(const Shape &child, HeapPtrShape *dictp) +{ + UnownedBaseShape *base = child.base()->unowned(); + + new (this) Shape(base, child.maybePropid(), + child.maybeSlot(), child.numFixedSlots(), child.attrs, + child.flags | IN_DICTIONARY, child.maybeShortid()); + + this->listp = NULL; + insertIntoDictionary(dictp); +} + inline -EmptyShape::EmptyShape(JSCompartment *comp, js::Class *aclasp) - : js::Shape(comp, aclasp) -{} - -/* static */ inline EmptyShape * -EmptyShape::ensure(JSContext *cx, js::Class *clasp, ReadBarriered *shapep) +EmptyShape::EmptyShape(BaseShape *base, uint32 nfixed) + : js::Shape(base, nfixed) { - EmptyShape *shape = shapep->get(); - if (!shape) { - if (!(shape = create(cx, clasp))) - return NULL; - shapep->set(shape); - } - return shape; -} - -/* static */ inline EmptyShape * -EmptyShape::getEmptyArgumentsShape(JSContext *cx) -{ - return ensure(cx, &NormalArgumentsObjectClass, &cx->compartment->emptyArgumentsShape); -} - -/* static */ inline EmptyShape * -EmptyShape::getEmptyBlockShape(JSContext *cx) -{ - return ensure(cx, &BlockClass, &cx->compartment->emptyBlockShape); -} - -/* static */ inline EmptyShape * -EmptyShape::getEmptyCallShape(JSContext *cx) -{ - return ensure(cx, &CallClass, &cx->compartment->emptyCallShape); -} - -/* static */ inline EmptyShape * -EmptyShape::getEmptyDeclEnvShape(JSContext *cx) -{ - return ensure(cx, &DeclEnvClass, &cx->compartment->emptyDeclEnvShape); -} - -/* static */ inline EmptyShape * -EmptyShape::getEmptyEnumeratorShape(JSContext *cx) -{ - return ensure(cx, &IteratorClass, &cx->compartment->emptyEnumeratorShape); -} - -/* static */ inline EmptyShape * -EmptyShape::getEmptyWithShape(JSContext *cx) -{ - return ensure(cx, &WithClass, &cx->compartment->emptyWithShape); + /* Only empty shapes can be NON_NATIVE. 
*/ + if (!getObjectClass()->isNative()) + flags |= NON_NATIVE; } inline void Shape::writeBarrierPre(const js::Shape *shape) { #ifdef JSGC_INCREMENTAL - if (!shape || shape == &sharedNonNative) + if (!shape) return; JSCompartment *comp = shape->compartment(); @@ -429,12 +359,42 @@ Shape::writeBarrierPost(const js::Shape *shape, void *addr) } inline void -Shape::readBarrier(const js::Shape *shape) +Shape::readBarrier(const Shape *shape) { #ifdef JSGC_INCREMENTAL JSCompartment *comp = shape->compartment(); + JS_ASSERT(comp->needsBarrier()); + + MarkShapeUnbarriered(comp->barrierTracer(), shape, "read barrier"); +#endif +} + +inline void +BaseShape::writeBarrierPre(BaseShape *base) +{ +#ifdef JSGC_INCREMENTAL + if (!base) + return; + + JSCompartment *comp = base->compartment(); if (comp->needsBarrier()) - MarkShapeUnbarriered(comp->barrierTracer(), shape, "read barrier"); + MarkBaseShapeUnbarriered(comp->barrierTracer(), base, "write barrier"); +#endif +} + +inline void +BaseShape::writeBarrierPost(BaseShape *shape, void *addr) +{ +} + +inline void +BaseShape::readBarrier(BaseShape *base) +{ +#ifdef JSGC_INCREMENTAL + JSCompartment *comp = base->compartment(); + JS_ASSERT(comp->needsBarrier()); + + MarkBaseShapeUnbarriered(comp->barrierTracer(), base, "read barrier"); #endif } diff --git a/js/src/jsscript.cpp b/js/src/jsscript.cpp index dce642683580..898e5d5f6dfd 100644 --- a/js/src/jsscript.cpp +++ b/js/src/jsscript.cpp @@ -90,13 +90,13 @@ Bindings::lookup(JSContext *cx, JSAtom *name, uintN *indexp) const return NONE; Shape *shape = - SHAPE_FETCH(Shape::search(cx, const_cast *>(&lastBinding), - ATOM_TO_JSID(name))); + SHAPE_FETCH(Shape::search(cx, const_cast(&lastBinding), + ATOM_TO_JSID(name))); if (!shape) return NONE; if (indexp) - *indexp = shape->shortid; + *indexp = shape->shortid(); if (shape->getter() == GetCallArg) return ARGUMENT; @@ -117,7 +117,7 @@ Bindings::add(JSContext *cx, JSAtom *name, BindingKind kind) * of the Call objects enumerable. ES5 reformulated all of its Clause 10 to * avoid objects as activations, something we should do too. */ - uintN attrs = JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_SHARED; + uintN attrs = JSPROP_ENUMERATE | JSPROP_PERMANENT; uint16 *indexp; PropertyOp getter; @@ -135,7 +135,8 @@ Bindings::add(JSContext *cx, JSAtom *name, BindingKind kind) indexp = &nupvars; getter = GetCallUpvar; setter = SetCallUpvar; - slot = SHAPE_INVALID_SLOT; + slot = lastBinding->maybeSlot(); + attrs |= JSPROP_SHARED; } else { JS_ASSERT(kind == VARIABLE || kind == CONSTANT); JS_ASSERT(nupvars == 0); @@ -164,9 +165,15 @@ Bindings::add(JSContext *cx, JSAtom *name, BindingKind kind) id = ATOM_TO_JSID(name); } - Shape child(id, getter, setter, slot, attrs, Shape::HAS_SHORTID, *indexp); + BaseShape base(&CallClass, NULL, BaseShape::VAROBJ, attrs, getter, setter); + BaseShape *nbase = BaseShape::getUnowned(cx, base); + if (!nbase) + return NULL; - Shape *shape = lastBinding->getChild(cx, child, &lastBinding); + Shape child(nbase, id, slot, 0, attrs, Shape::HAS_SHORTID, *indexp); + + /* Shapes in bindings cannot be dictionaries. 
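
The Shape and BaseShape writeBarrierPre/readBarrier hooks added in jsscopeinlines.h above follow the usual incremental-GC discipline: while marking is in progress, a reference is reported to the marker before it is overwritten, and a weak-table entry is marked when it is read out, so the collector cannot lose an object that was only reachable through that slot. A generic sketch of a pre-barriered pointer, with placeholder GC hooks that are assumptions rather than SpiderMonkey API:

// Placeholder hooks standing in for the collector's state and mark entry point.
static bool IncrementalMarkingInProgress() { return false; }
static void MarkReferent(void *) {}

template <typename T>
class PreBarrieredPtr {
    T *ptr;

  public:
    explicit PreBarrieredPtr(T *p = nullptr) : ptr(p) {}

    void set(T *newPtr) {
        // Pre-barrier: the old value may only have been reachable through this
        // slot, so report it to the marker before it is overwritten.
        if (ptr && IncrementalMarkingInProgress())
            MarkReferent(ptr);
        ptr = newPtr;
    }

    T *get() const { return ptr; }
};

The readBarrier counterpart serves the weak baseShapes/initialShapes tables: an entry handed out during incremental marking is marked on read, so it stays alive now that a strong reference to it exists.
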
*/ + Shape *shape = lastBinding->getChildBinding(cx, child, &lastBinding); if (!shape) return false; @@ -196,7 +203,7 @@ Bindings::getLocalNameArray(JSContext *cx, Vector *namesp) for (Shape::Range r = lastBinding->all(); !r.empty(); r.popFront()) { const Shape &shape = r.front(); - uintN index = uint16(shape.shortid); + uintN index = uint16(shape.shortid()); if (shape.getter() == GetCallArg) { JS_ASSERT(index < nargs); @@ -208,10 +215,10 @@ Bindings::getLocalNameArray(JSContext *cx, Vector *namesp) index += nargs; } - if (JSID_IS_ATOM(shape.propid)) { - names[index] = JSID_TO_ATOM(shape.propid); + if (JSID_IS_ATOM(shape.propid())) { + names[index] = JSID_TO_ATOM(shape.propid()); } else { - JS_ASSERT(JSID_IS_INT(shape.propid)); + JS_ASSERT(JSID_IS_INT(shape.propid())); JS_ASSERT(shape.getter() == GetCallArg); names[index] = NULL; } @@ -277,7 +284,7 @@ void Bindings::makeImmutable() { JS_ASSERT(lastBinding); - lastBinding->freezeIfDictionary(); + JS_ASSERT(!lastBinding->inDictionary()); } void @@ -1222,10 +1229,16 @@ JSScript::NewScriptFromEmitter(JSContext *cx, BytecodeEmitter *bce) if (bce->flags & TCF_FUN_HEAVYWEIGHT) fun->flags |= JSFUN_HEAVYWEIGHT; - /* Watch for scripts whose functions will not be cloned. These are singletons. */ + /* + * Mark functions which will only be executed once as singletons. + * Skip this for flat closures, which must be copied on executing. + */ bool singleton = - cx->typeInferenceEnabled() && bce->parent && bce->parent->compiling() && - bce->parent->asBytecodeEmitter()->checkSingletonContext(); + cx->typeInferenceEnabled() && + bce->parent && + bce->parent->compiling() && + bce->parent->asBytecodeEmitter()->checkSingletonContext() && + !fun->isFlatClosure(); if (!script->typeSetFunction(cx, fun, singleton)) return NULL; @@ -1322,7 +1335,7 @@ js_CallDestroyScriptHook(JSContext *cx, JSScript *script) } void -JSScript::finalize(JSContext *cx) +JSScript::finalize(JSContext *cx, bool background) { CheckScript(this, NULL); diff --git a/js/src/jsscript.h b/js/src/jsscript.h index 5c099a6d7ef8..bea522ed8b0c 100644 --- a/js/src/jsscript.h +++ b/js/src/jsscript.h @@ -174,11 +174,9 @@ class Bindings { uint16 nargs; uint16 nvars; uint16 nupvars; - bool hasExtensibleParents; public: inline Bindings(JSContext *cx); - inline ~Bindings(); /* * Transfers ownership of bindings data from bindings into this fresh @@ -211,6 +209,12 @@ class Bindings { /* Returns the shape lineage generated for these bindings. */ inline js::Shape *lastShape() const; + /* See Scope::extensibleParents */ + inline bool extensibleParents(); + bool setExtensibleParents(JSContext *cx); + + bool setParent(JSContext *cx, JSObject *obj); + enum { /* * A script may have no more than this many arguments, variables, or @@ -294,54 +298,6 @@ class Bindings { */ void makeImmutable(); - /* - * Sometimes call objects and run-time block objects need unique shapes, but - * sometimes they don't. - * - * Property cache entries only record the shapes of the first and last - * objects along the search path, so if the search traverses more than those - * two objects, then those first and last shapes must determine the shapes - * of everything else along the path. 
The js_PurgeScopeChain stuff takes - * care of making this work, but that suffices only because we require that - * start points with the same shape have the same successor object in the - * search path --- a cache hit means the starting shapes were equal, which - * means the seach path tail (everything but the first object in the path) - * was shared, which in turn means the effects of a purge will be seen by - * all affected starting search points. - * - * For call and run-time block objects, the "successor object" is the scope - * chain parent. Unlike prototype objects (of which there are usually few), - * scope chain parents are created frequently (possibly on every call), so - * following the shape-implies-parent rule blindly would lead one to give - * every call and block its own shape. - * - * In many cases, however, it's not actually necessary to give call and - * block objects their own shapes, and we can do better. If the code will - * always be used with the same global object, and none of the enclosing - * call objects could have bindings added to them at runtime (by direct eval - * calls or function statements), then we can use a fixed set of shapes for - * those objects. You could think of the shapes in the functions' bindings - * and compile-time blocks as uniquely identifying the global object(s) at - * the end of the scope chain. - * - * (In fact, some JSScripts we do use against multiple global objects (see - * bug 618497), and using the fixed shapes isn't sound there.) - * - * In deciding whether a call or block has any extensible parents, we - * actually only need to consider enclosing calls; blocks are never - * extensible, and the other sorts of objects that appear in the scope - * chains ('with' blocks, say) are not CacheableNonGlobalScopes. - * - * If the hasExtensibleParents flag is set, then Call objects created for - * the function this Bindings describes need unique shapes. If the flag is - * clear, then we can use lastBinding's shape. - * - * For blocks, we set the the OWN_SHAPE flag on the compiler-generated - * blocksto indicate that their clones need unique shapes. - */ - void setExtensibleParents() { hasExtensibleParents = true; } - bool extensibleParents() const { return hasExtensibleParents; } - /* * These methods provide direct access to the shape path normally * encapsulated by js::Bindings. These methods may be used to make a @@ -490,11 +446,9 @@ struct JSScript : public js::gc::Cell { undefined properties in this script */ bool hasSingletons:1; /* script has singleton objects */ - bool hasFunction:1; /* script has an associated function */ - bool isHeavyweightFunction:1; /* function is heavyweight */ - bool isOuterFunction:1; /* function is heavyweight, with inner functions */ - bool isInnerFunction:1; /* function is directly nested in a heavyweight - * outer function */ + bool isOuterFunction:1; /* function is heavyweight, with inner functions */ + bool isInnerFunction:1; /* function is directly nested in a heavyweight + * outer function */ bool isActiveEval:1; /* script came from eval(), and is still active */ bool isCachedEval:1; /* script came from eval(), and is in eval cache */ bool usedLazyArgs:1; /* script has used lazy arguments at some point */ @@ -520,8 +474,10 @@ struct JSScript : public js::gc::Cell { * the script with 4 bytes. We use them to store tiny scripts like empty * scripts. 
*/ +#if JS_BITS_PER_WORD == 64 #define JS_SCRIPT_INLINE_DATA_LIMIT 4 uint8 inlineData[JS_SCRIPT_INLINE_DATA_LIMIT]; +#endif const char *filename; /* source filename or null */ JSAtom **atoms; /* maps immediate index to literal struct */ @@ -556,6 +512,17 @@ struct JSScript : public js::gc::Cell { /* Execution and profiling information for JIT code in the script. */ js::ScriptOpcodeCounts pcCounters; + private: + JSFunction *function_; + public: + + /* + * Original compiled function for the script, if it has a function. + * NULL for global and eval scripts. + */ + JSFunction *function() const { return function_; } + void setFunction(JSFunction *fun) { function_ = fun; } + #ifdef JS_CRASH_DIAGNOSTICS /* All diagnostic fields must be multiples of Cell::CellSize. */ uint32 cookie2[Cell::CellSize / sizeof(uint32)]; @@ -577,15 +544,15 @@ struct JSScript : public js::gc::Cell { js::types::TypeScript *types; /* Ensure the script has a TypeScript. */ - inline bool ensureHasTypes(JSContext *cx, JSFunction *fun = NULL); + inline bool ensureHasTypes(JSContext *cx); /* * Ensure the script has scope and bytecode analysis information. * Performed when the script first runs, or first runs after a TypeScript - * GC purge. If fun/scope are NULL then the script must already have types - * with scope information. + * GC purge. If scope is NULL then the script must already have types with + * scope information. */ - inline bool ensureRanAnalysis(JSContext *cx, JSFunction *fun = NULL, JSObject *scope = NULL); + inline bool ensureRanAnalysis(JSContext *cx, JSObject *scope); /* Ensure the script has type inference analysis information. */ inline bool ensureRanInference(JSContext *cx); @@ -603,7 +570,6 @@ struct JSScript : public js::gc::Cell { inline bool hasGlobal() const; inline bool hasClearedGlobal() const; - inline JSFunction *function() const; inline js::GlobalObject *global() const; inline js::types::TypeScriptNesting *nesting() const; @@ -615,7 +581,7 @@ struct JSScript : public js::gc::Cell { } private: - bool makeTypes(JSContext *cx, JSFunction *fun); + bool makeTypes(JSContext *cx); bool makeAnalysis(JSContext *cx); public: @@ -803,7 +769,7 @@ struct JSScript : public js::gc::Cell { uint32 stepModeCount() { return stepMode & stepCountMask; } #endif - void finalize(JSContext *cx); + void finalize(JSContext *cx, bool background); static inline void writeBarrierPre(JSScript *script); static inline void writeBarrierPost(JSScript *script, void *addr); diff --git a/js/src/jsscriptinlines.h b/js/src/jsscriptinlines.h index e2db5d2d0475..e8cabd440331 100644 --- a/js/src/jsscriptinlines.h +++ b/js/src/jsscriptinlines.h @@ -48,6 +48,7 @@ #include "jsscript.h" #include "jsscope.h" +#include "vm/CallObject.h" #include "vm/GlobalObject.h" #include "vm/RegExpObject.h" @@ -57,42 +58,26 @@ namespace js { inline Bindings::Bindings(JSContext *cx) - : nargs(0), nvars(0), nupvars(0), hasExtensibleParents(false) -{ -} - -inline -Bindings::~Bindings() -{ -} + : lastBinding(NULL), nargs(0), nvars(0), nupvars(0) +{} inline void Bindings::transfer(JSContext *cx, Bindings *bindings) { JS_ASSERT(!lastBinding); + JS_ASSERT(!bindings->lastBinding || !bindings->lastBinding->inDictionary()); *this = *bindings; #ifdef DEBUG bindings->lastBinding = NULL; #endif - - /* Preserve back-pointer invariants across the lastBinding transfer. 
*/ - if (lastBinding && lastBinding->inDictionary()) - lastBinding->listp = &this->lastBinding; } inline void Bindings::clone(JSContext *cx, Bindings *bindings) { JS_ASSERT(!lastBinding); - - /* - * Non-dictionary bindings are fine to share, as are dictionary bindings if - * they're copy-on-modification. - */ - JS_ASSERT(!bindings->lastBinding || - !bindings->lastBinding->inDictionary() || - bindings->lastBinding->frozen()); + JS_ASSERT(!bindings->lastBinding || !bindings->lastBinding->inDictionary()); *this = *bindings; } @@ -101,7 +86,7 @@ Shape * Bindings::lastShape() const { JS_ASSERT(lastBinding); - JS_ASSERT_IF(lastBinding->inDictionary(), lastBinding->frozen()); + JS_ASSERT(!lastBinding->inDictionary()); return lastBinding; } @@ -109,13 +94,24 @@ bool Bindings::ensureShape(JSContext *cx) { if (!lastBinding) { - lastBinding = EmptyShape::getEmptyCallShape(cx); + /* Get an allocation kind to match an empty call object. */ + gc::AllocKind kind = gc::FINALIZE_OBJECT4; + JS_ASSERT(gc::GetGCKindSlots(kind) == CallObject::RESERVED_SLOTS + 1); + + lastBinding = EmptyShape::getInitialShape(cx, &CallClass, NULL, NULL, kind, + BaseShape::VAROBJ); if (!lastBinding) return false; } return true; } +bool +Bindings::extensibleParents() +{ + return lastBinding && lastBinding->extensibleParents(); +} + extern const char * CurrentScriptFileAndLineSlow(JSContext *cx, uintN *linenop); @@ -138,11 +134,8 @@ inline JSFunction * JSScript::getFunction(size_t index) { JSObject *funobj = getObject(index); - JS_ASSERT(funobj->isFunction()); - JS_ASSERT(funobj == (JSObject *) funobj->getPrivate()); - JSFunction *fun = (JSFunction *) funobj; - JS_ASSERT(fun->isInterpreted()); - return fun; + JS_ASSERT(funobj->isFunction() && funobj->toFunction()->isInterpreted()); + return funobj->toFunction(); } inline JSFunction * @@ -202,17 +195,10 @@ JSScript::hasClearedGlobal() const return obj && obj->isCleared(); } -inline JSFunction * -JSScript::function() const -{ - JS_ASSERT(hasFunction && types); - return types->function; -} - inline js::types::TypeScriptNesting * JSScript::nesting() const { - JS_ASSERT(hasFunction && types && types->hasScope()); + JS_ASSERT(function() && types && types->hasScope()); return types->nesting; } diff --git a/js/src/jsstr.cpp b/js/src/jsstr.cpp index 0854a5f9e6fa..17aa70dd93cd 100644 --- a/js/src/jsstr.cpp +++ b/js/src/jsstr.cpp @@ -1734,8 +1734,8 @@ FindReplaceLength(JSContext *cx, RegExpStatics *res, ReplaceData &rdata, size_t /* Only handle the case where the property exists and is on this object. 
*/ if (prop && holder == base) { Shape *shape = (Shape *) prop; - if (shape->slot != SHAPE_INVALID_SLOT && shape->hasDefaultGetter()) { - Value value = base->getSlot(shape->slot); + if (shape->hasSlot() && shape->hasDefaultGetter()) { + Value value = base->getSlot(shape->slot()); if (value.isString()) { rdata.repstr = value.toString()->ensureLinear(cx); if (!rdata.repstr) @@ -2146,7 +2146,7 @@ js::str_replace(JSContext *cx, uintN argc, Value *vp) rdata.dollar = rdata.dollarEnd = NULL; if (rdata.lambda->isFunction()) { - JSFunction *fun = rdata.lambda->getFunctionPrivate(); + JSFunction *fun = rdata.lambda->toFunction(); if (fun->isInterpreted()) { /* * Pattern match the script to check if it is is indexing into a @@ -2161,7 +2161,7 @@ js::str_replace(JSContext *cx, uintN argc, Value *vp) Value table = UndefinedValue(); if (JSOp(*pc) == JSOP_GETFCSLOT) { - table = rdata.lambda->getFlatClosureUpvar(GET_UINT16(pc)); + table = fun->getFlatClosureUpvar(GET_UINT16(pc)); pc += JSOP_GETFCSLOT_LENGTH; } @@ -2955,10 +2955,9 @@ static JSFunctionSpec string_static_methods[] = { JS_FS_END }; -const Shape * +Shape * StringObject::assignInitialShape(JSContext *cx) { - JS_ASSERT(!cx->compartment->initialStringShape); JS_ASSERT(nativeEmpty()); return addDataProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.lengthAtom), diff --git a/js/src/jstypedarray.cpp b/js/src/jstypedarray.cpp index 46156a993d86..bce88c090a64 100644 --- a/js/src/jstypedarray.cpp +++ b/js/src/jstypedarray.cpp @@ -73,8 +73,12 @@ using namespace js; using namespace js::gc; using namespace js::types; -/* slots can only be upto 255 */ -static const uint8 ARRAYBUFFER_RESERVED_SLOTS = 16; +/* + * Allocate array buffers with the maximum number of fixed slots marked as + * reserved, so that the fixed slots may be used for the buffer's contents. + * The last fixed slot is kept for the object's private data. + */ +static const uint8 ARRAYBUFFER_RESERVED_SLOTS = JSObject::MAX_FIXED_SLOTS - 1; static bool ValueIsLength(JSContext *cx, const Value &v, jsuint *len) @@ -159,26 +163,30 @@ JSObject::allocateArrayBufferSlots(JSContext *cx, uint32 size) /* * ArrayBuffer objects delegate added properties to another JSObject, so * their internal layout can use the object's fixed slots for storage. + * Set up the object to look like an array with an elements header. */ - JS_ASSERT(isArrayBuffer() && !hasSlotsArray()); + JS_ASSERT(isArrayBuffer() && !hasDynamicSlots() && !hasDynamicElements()); - uint32 bytes = size + sizeof(Value); - if (size > sizeof(HeapValue) * ARRAYBUFFER_RESERVED_SLOTS - sizeof(HeapValue) ) { - HeapValue *tmpslots = (HeapValue *)cx->calloc_(bytes); - if (!tmpslots) + size_t usableSlots = ARRAYBUFFER_RESERVED_SLOTS - ObjectElements::VALUES_PER_HEADER; + + if (size > sizeof(Value) * usableSlots) { + ObjectElements *newheader = (ObjectElements *)cx->calloc_(size + sizeof(ObjectElements)); + if (!newheader) return false; - slots = tmpslots; - /* - * Note that |bytes| may not be a multiple of |sizeof(Value)|, so - * |capacity * sizeof(Value)| may underestimate the size by up to - * |sizeof(Value) - 1| bytes. 
- */ - capacity = bytes / sizeof(HeapValue); + elements = newheader->elements(); } else { - slots = fixedSlots(); - memset(slots, 0, bytes); + elements = fixedElements(); + memset(fixedSlots(), 0, size + sizeof(ObjectElements)); } - *((uint32*)slots) = size; + getElementsHeader()->length = size; + + /* + * Note that |bytes| may not be a multiple of |sizeof(Value)|, so + * |capacity * sizeof(Value)| may underestimate the size by up to + * |sizeof(Value) - 1| bytes. + */ + getElementsHeader()->capacity = size / sizeof(Value); + return true; } @@ -186,7 +194,7 @@ static JSObject * DelegateObject(JSContext *cx, JSObject *obj) { if (!obj->getPrivate()) { - JSObject *delegate = NewNonFunction(cx, &ObjectClass, obj->getProto(), NULL); + JSObject *delegate = NewObjectWithGivenProto(cx, &ObjectClass, obj->getProto(), NULL); obj->setPrivate(delegate); return delegate; } @@ -199,6 +207,7 @@ ArrayBuffer::create(JSContext *cx, int32 nbytes) JSObject *obj = NewBuiltinClassInstance(cx, &ArrayBuffer::slowClass); if (!obj) return NULL; + JS_ASSERT(obj->getAllocKind() == gc::FINALIZE_OBJECT16); if (nbytes < 0) { /* @@ -211,8 +220,13 @@ ArrayBuffer::create(JSContext *cx, int32 nbytes) } JS_ASSERT(obj->getClass() == &ArrayBuffer::slowClass); - obj->setSharedNonNativeMap(); - obj->setClass(&ArrayBufferClass); + + js::Shape *empty = EmptyShape::getInitialShape(cx, &ArrayBufferClass, + obj->getProto(), obj->getParent(), + gc::FINALIZE_OBJECT16); + if (!empty) + return false; + obj->setLastPropertyInfallible(empty); /* * The first 8 bytes hold the length. @@ -831,7 +845,7 @@ TypedArray::lengthOffset() /* static */ int TypedArray::dataOffset() { - return offsetof(JSObject, privateData); + return JSObject::getPrivateDataOffset(NUM_FIXED_SLOTS); } /* Helper clamped uint8 type */ @@ -1331,6 +1345,7 @@ class TypedArrayTemplate JSObject *obj = NewBuiltinClassInstance(cx, slowClass()); if (!obj) return NULL; + JS_ASSERT(obj->getAllocKind() == gc::FINALIZE_OBJECT8); /* * Specialize the type of the object on the current scripted location, @@ -1363,11 +1378,16 @@ class TypedArrayTemplate JS_ASSERT(getDataOffset(obj) <= offsetData(obj, bufferByteLength)); JS_ASSERT(obj->getClass() == slowClass()); - obj->setSharedNonNativeMap(); - obj->setClass(fastClass()); - // FIXME Bug 599008: make it ok to call preventExtensions here. - obj->flags |= JSObject::NOT_EXTENSIBLE; + js::Shape *empty = EmptyShape::getInitialShape(cx, fastClass(), + obj->getProto(), obj->getParent(), + gc::FINALIZE_OBJECT8, + BaseShape::NOT_EXTENSIBLE); + if (!empty) + return false; + obj->setLastPropertyInfallible(empty); + + JS_ASSERT(obj->numFixedSlots() == NUM_FIXED_SLOTS); return obj; } diff --git a/js/src/jstypedarray.h b/js/src/jstypedarray.h index f6e660a93a8c..9db2ec51c643 100644 --- a/js/src/jstypedarray.h +++ b/js/src/jstypedarray.h @@ -200,7 +200,8 @@ struct JS_FRIEND_API(TypedArray) { FIELD_BYTELENGTH, FIELD_TYPE, FIELD_BUFFER, - FIELD_MAX + FIELD_MAX, + NUM_FIXED_SLOTS = 7 }; // and MUST NOT be used to construct new objects. 
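For orientation, here is a minimal, self-contained sketch of the elements header that the jstypedarray.cpp and jstypedarray.h hunks above switch ArrayBuffer storage onto. The field names (capacity, initializedLength, length) and the VALUES_PER_HEADER / elements() helpers mirror the code in those hunks; the Value stand-in and the trailing flags word are placeholders, not the engine's actual definitions.

#include <stdint.h>
#include <stddef.h>

struct Value { uint64_t bits; };          // stand-in for js::Value, one jsval-sized word

struct ObjectElements {
    uint32_t capacity;                    // number of Values the allocation can hold
    uint32_t initializedLength;           // dense-array initialized length
    uint32_t length;                      // array 'length'; byte length for an ArrayBuffer
    uint32_t flags;                       // placeholder for the remaining header bits

    // The header occupies a whole number of Value-sized slots; the patch refers
    // to this as ObjectElements::VALUES_PER_HEADER.
    static const size_t VALUES_PER_HEADER = sizeof(ObjectElements) / sizeof(Value);

    // Element storage begins immediately after the header, so obj->elements can
    // point directly at the data.
    Value *elements() { return reinterpret_cast<Value *>(this + 1); }

    // Inverse mapping, which is what getElementsHeader() amounts to in the
    // hunks above: step back over the header from the elements pointer.
    static ObjectElements *fromElements(Value *elems) {
        return reinterpret_cast<ObjectElements *>(elems) - 1;
    }
};

This also accounts for the usableSlots computation in allocateArrayBufferSlots: of the ARRAYBUFFER_RESERVED_SLOTS fixed slots, VALUES_PER_HEADER hold the header and the rest hold the buffer contents inline, with the last fixed slot kept for the object's private data.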
diff --git a/js/src/jstypedarrayinlines.h b/js/src/jstypedarrayinlines.h index 2c106f938c5e..a425a20a6aab 100644 --- a/js/src/jstypedarrayinlines.h +++ b/js/src/jstypedarrayinlines.h @@ -47,14 +47,13 @@ inline uint32 JSObject::arrayBufferByteLength() { JS_ASSERT(isArrayBuffer()); - return *((uint32*) slots); + return getElementsHeader()->length; } inline uint8 * JSObject::arrayBufferDataOffset() { - uint64 *base = ((uint64*)slots) + 1; - return (uint8*) base; + return (uint8 *) elements; } namespace js { @@ -96,7 +95,7 @@ TypedArray::getBuffer(JSObject *obj) { inline void * TypedArray::getDataOffset(JSObject *obj) { - return (void *)obj->getPrivate(); + return (void *)obj->getPrivate(NUM_FIXED_SLOTS); } } diff --git a/js/src/jswatchpoint.cpp b/js/src/jswatchpoint.cpp index ce719b4f09b2..78d6725e096a 100644 --- a/js/src/jswatchpoint.cpp +++ b/js/src/jswatchpoint.cpp @@ -86,7 +86,9 @@ WatchpointMap::watch(JSContext *cx, JSObject *obj, jsid id, JS_ASSERT(id == js_CheckForStringIndex(id)); JS_ASSERT(JSID_IS_STRING(id) || JSID_IS_INT(id)); - obj->setWatched(cx); + if (!obj->setWatched(cx)) + return false; + Watchpoint w; w.handler = handler; w.closure = closure; @@ -147,8 +149,7 @@ WatchpointMap::triggerWatchpoint(JSContext *cx, JSObject *obj, jsid id, Value *v old.setUndefined(); if (obj->isNative()) { if (const Shape *shape = obj->nativeLookup(cx, id)) { - uint32 slot = shape->slot; - if (obj->containsSlot(slot)) { + if (shape->hasSlot()) { if (shape->isMethod()) { /* * The existing watched property is a method. Trip @@ -156,7 +157,8 @@ WatchpointMap::triggerWatchpoint(JSContext *cx, JSObject *obj, jsid id, Value *v * passing an uncloned function object to the * handler. */ - Value method = ObjectValue(shape->methodObject()); + old = UndefinedValue(); + Value method = ObjectValue(*obj->nativeGetMethod(shape)); if (!obj->methodReadBarrier(cx, *shape, &method)) return false; shape = obj->nativeLookup(cx, id); @@ -164,7 +166,7 @@ WatchpointMap::triggerWatchpoint(JSContext *cx, JSObject *obj, jsid id, Value *v JS_ASSERT(!shape->isMethod()); old = method; } else { - old = obj->nativeGetSlot(slot); + old = obj->nativeGetSlot(shape->slot()); } } } diff --git a/js/src/jsweakmap.cpp b/js/src/jsweakmap.cpp index 1120cfcedf89..4d26bc8f9f06 100644 --- a/js/src/jsweakmap.cpp +++ b/js/src/jsweakmap.cpp @@ -283,8 +283,6 @@ WeakMap_construct(JSContext *cx, uintN argc, Value *vp) if (!obj) return false; - obj->initPrivate(NULL); - vp->setObject(*obj); return true; } @@ -328,7 +326,6 @@ js_InitWeakMapClass(JSContext *cx, JSObject *obj) JSObject *weakMapProto = global->createBlankPrototype(cx, &WeakMapClass); if (!weakMapProto) return NULL; - weakMapProto->initPrivate(NULL); JSFunction *ctor = global->createConstructor(cx, WeakMap_construct, &WeakMapClass, CLASS_ATOM(cx, WeakMap), 0); diff --git a/js/src/jswrapper.cpp b/js/src/jswrapper.cpp index 4cbd6b1782e7..2798fd23eb7c 100644 --- a/js/src/jswrapper.cpp +++ b/js/src/jswrapper.cpp @@ -745,8 +745,8 @@ bool CrossCompartmentWrapper::nativeCall(JSContext *cx, JSObject *wrapper, Class *clasp, Native native, CallArgs srcArgs) { JS_ASSERT_IF(!srcArgs.calleev().isUndefined(), - srcArgs.callee().getFunctionPrivate()->native() == native || - srcArgs.callee().getFunctionPrivate()->native() == js_generic_native_method_dispatcher); + srcArgs.callee().toFunction()->native() == native || + srcArgs.callee().toFunction()->native() == js_generic_native_method_dispatcher); JS_ASSERT(&srcArgs.thisv().toObject() == wrapper); 
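The wrapper, XDR and E4X hunks around here all make the same mechanical substitution: obj->getFunctionPrivate() becomes obj->toFunction(). Below is a self-contained sketch of why that is now just a checked downcast, using stand-in types rather than the real JSObject/JSFunction declarations.

#include <cassert>

struct JSObject {
    bool isFun;
    void *privateData;                    // old model: the JSFunction lived in the private slot
    bool isFunction() const { return isFun; }
};

struct JSFunction : JSObject { };         // new model implied by the patch: the function is the object

// Old accessor, removed by this patch: fetch the function out of the private slot.
static JSFunction *getFunctionPrivate(JSObject *obj) {
    return static_cast<JSFunction *>(obj->privateData);
}

// New accessor: a checked downcast, which is roughly what obj->toFunction() does here.
static JSFunction *toFunction(JSObject *obj) {
    assert(obj->isFunction());
    return static_cast<JSFunction *>(obj);
}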
JS_ASSERT(!UnwrapObject(wrapper)->isCrossCompartmentWrapper()); diff --git a/js/src/jsxdrapi.cpp b/js/src/jsxdrapi.cpp index 19d7e1ffb321..ec70f8f89825 100644 --- a/js/src/jsxdrapi.cpp +++ b/js/src/jsxdrapi.cpp @@ -691,10 +691,7 @@ JS_XDRFunctionObject(JSXDRState *xdr, JSObject **objp) XDRScriptState fstate(xdr); if (xdr->mode == JSXDR_ENCODE) { - JSFunction* fun = (*objp)->getFunctionPrivate(); - if (!fun) - return false; - + JSFunction* fun = (*objp)->toFunction(); fstate.filename = fun->script()->filename; } diff --git a/js/src/jsxml.cpp b/js/src/jsxml.cpp index 8bc3dedcbb13..9a5ce1eaf40e 100644 --- a/js/src/jsxml.cpp +++ b/js/src/jsxml.cpp @@ -189,10 +189,7 @@ NewBuiltinClassInstanceXML(JSContext *cx, Class *clasp) if (!cx->runningWithTrustedPrincipals()) ++sE4XObjectsCreated; - JSObject *obj = NewBuiltinClassInstance(cx, clasp); - if (obj) - obj->syncSpecialEquality(); - return obj; + return NewBuiltinClassInstance(cx, clasp); } #define DEFINE_GETTER(name,code) \ @@ -524,7 +521,7 @@ NewXMLAttributeName(JSContext *cx, JSLinearString *uri, JSLinearString *prefix, * exposed to scripts. */ JSObject *parent = GetGlobalForScopeChain(cx); - JSObject *obj = NewNonFunction(cx, &AttributeNameClass, NULL, parent); + JSObject *obj = NewObjectWithGivenProto(cx, &AttributeNameClass, NULL, parent); if (!obj) return NULL; JS_ASSERT(obj->isQName()); @@ -1188,7 +1185,7 @@ static const char xml_namespace_str[] = "http://www.w3.org/XML/1998/namespace"; static const char xmlns_namespace_str[] = "http://www.w3.org/2000/xmlns/"; void -JSXML::finalize(JSContext *cx) +JSXML::finalize(JSContext *cx, bool builtin) { if (JSXML_HAS_KIDS(this)) { xml_kids.finish(cx); @@ -5458,7 +5455,7 @@ StartNonListXMLMethod(JSContext *cx, jsval *vp, JSObject **objp) } } - fun = JSVAL_TO_OBJECT(*vp)->getFunctionPrivate(); + fun = JSVAL_TO_OBJECT(*vp)->toFunction(); JS_snprintf(numBuf, sizeof numBuf, "%u", xml->xml_kids.length); JSAutoByteString funNameBytes; if (const char *funName = GetFunctionNameBytes(cx, fun, &funNameBytes)) { @@ -7384,7 +7381,7 @@ NewXMLObject(JSContext *cx, JSXML *xml) JSObject *obj; JSObject *parent = GetGlobalForScopeChain(cx); - obj = NewNonFunction(cx, &XMLClass, NULL, parent); + obj = NewObjectWithClassProto(cx, &XMLClass, NULL, parent); if (!obj) return NULL; obj->setPrivate(xml); @@ -7422,7 +7419,6 @@ js_InitNamespaceClass(JSContext *cx, JSObject *obj) JSFlatString *empty = cx->runtime->emptyString; namespaceProto->setNamePrefix(empty); namespaceProto->setNameURI(empty); - namespaceProto->syncSpecialEquality(); const uintN NAMESPACE_CTOR_LENGTH = 2; JSFunction *ctor = global->createConstructor(cx, Namespace, &NamespaceClass, @@ -7456,7 +7452,6 @@ js_InitQNameClass(JSContext *cx, JSObject *obj) JSAtom *empty = cx->runtime->emptyString; if (!InitXMLQName(cx, qnameProto, empty, empty, empty)) return NULL; - qnameProto->syncSpecialEquality(); const uintN QNAME_CTOR_LENGTH = 2; JSFunction *ctor = global->createConstructor(cx, QName, &QNameClass, @@ -7569,7 +7564,8 @@ GlobalObject::getFunctionNamespace(JSContext *cx, Value *vp) * names, its prefix and uri references are copied to the QName. * The parent remains set and links back to global. 
*/ - obj->clearType(); + if (!obj->clearType(cx)) + return false; v.set(compartment(), ObjectValue(*obj)); } @@ -7606,7 +7602,7 @@ js_GetDefaultXMLNamespace(JSContext *cx, jsval *vp) return false; obj = NULL; - for (tmp = scopeChain; tmp; tmp = tmp->getParent()) { + for (tmp = scopeChain; tmp; tmp = tmp->scopeChain()) { Class *clasp = tmp->getClass(); if (clasp == &BlockClass || clasp == &WithClass) continue; @@ -7722,7 +7718,7 @@ js_GetAnyName(JSContext *cx, jsid *idp) JSObject *global = cx->hasfp() ? cx->fp()->scopeChain().getGlobal() : cx->globalObject; Value v = global->getReservedSlot(JSProto_AnyName); if (v.isUndefined()) { - JSObject *obj = NewNonFunction(cx, &AnyNameClass, NULL, global); + JSObject *obj = NewObjectWithGivenProto(cx, &AnyNameClass, NULL, global); if (!obj) return false; @@ -7804,7 +7800,7 @@ js_FindXMLProperty(JSContext *cx, const Value &nameval, JSObject **objp, jsid *i return JS_TRUE; } } - } while ((obj = obj->getParent()) != NULL); + } while ((obj = obj->scopeChain()) != NULL); JSAutoByteString printable; JSString *str = ConvertQNameToString(cx, nameobj); @@ -7990,7 +7986,7 @@ js_StepXMLListFilter(JSContext *cx, JSBool initialized) } JSObject *parent = GetGlobalForScopeChain(cx); - filterobj = NewNonFunction(cx, &js_XMLFilterClass, NULL, parent); + filterobj = NewObjectWithGivenProto(cx, &js_XMLFilterClass, NULL, parent); if (!filterobj) return JS_FALSE; diff --git a/js/src/jsxml.h b/js/src/jsxml.h index 4a82e1fd7e45..22d42acd8ed5 100644 --- a/js/src/jsxml.h +++ b/js/src/jsxml.h @@ -204,7 +204,7 @@ struct JSXML : js::gc::Cell { void *pad; #endif - void finalize(JSContext *cx); + void finalize(JSContext *cx, bool background); static void writeBarrierPre(JSXML *xml); static void writeBarrierPost(JSXML *xml, void *addr); @@ -225,18 +225,6 @@ js_NewXMLObject(JSContext *cx, JSXMLClass xml_class); extern JSObject * js_GetXMLObject(JSContext *cx, JSXML *xml); -/* - * Methods to test whether an object or a value is of type "xml" (per typeof). 
- */ - -#define VALUE_IS_XML(v) (!JSVAL_IS_PRIMITIVE(v) && JSVAL_TO_OBJECT(v)->isXML()) - -static inline bool -IsXML(const js::Value &v) -{ - return v.isObject() && v.toObject().isXML(); -} - extern JSObject * js_InitNamespaceClass(JSContext *cx, JSObject *obj); diff --git a/js/src/methodjit/BaseAssembler.h b/js/src/methodjit/BaseAssembler.h index 72d8b843273f..c2981c772a67 100644 --- a/js/src/methodjit/BaseAssembler.h +++ b/js/src/methodjit/BaseAssembler.h @@ -180,22 +180,20 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist return differenceBetween(startLabel, l); } - void load32FromImm(void *ptr, RegisterID reg) { - load32(ptr, reg); + void loadPtrFromImm(void *ptr, RegisterID reg) { + loadPtr(ptr, reg); } void loadShape(RegisterID obj, RegisterID shape) { - load32(Address(obj, offsetof(JSObject, objShape)), shape); + loadPtr(Address(obj, JSObject::offsetOfShape()), shape); + } + + Jump guardShape(RegisterID objReg, const Shape *shape) { + return branchPtr(NotEqual, Address(objReg, JSObject::offsetOfShape()), ImmPtr(shape)); } Jump guardShape(RegisterID objReg, JSObject *obj) { - return branch32(NotEqual, Address(objReg, offsetof(JSObject, objShape)), - Imm32(obj->shape())); - } - - Jump testFunction(Condition cond, RegisterID fun) { - return branchPtr(cond, Address(fun, JSObject::offsetOfClassPointer()), - ImmPtr(&FunctionClass)); + return guardShape(objReg, obj->lastProperty()); } /* @@ -767,14 +765,15 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist Jump holeCheck; }; - // Guard an array's capacity, length or initialized length. - Jump guardArrayExtent(uint32 offset, RegisterID objReg, const Int32Key &key, Condition cond) { - Address initlen(objReg, offset); + // Guard an extent (capacity, length or initialized length) on an array or typed array. + Jump guardArrayExtent(int offset, RegisterID reg, + const Int32Key &key, Condition cond) { + Address extent(reg, offset); if (key.isConstant()) { JS_ASSERT(key.index() >= 0); - return branch32(cond, initlen, Imm32(key.index())); + return branch32(cond, extent, Imm32(key.index())); } - return branch32(cond, initlen, key.reg()); + return branch32(cond, extent, key.reg()); } // Load a jsval from an array slot, given a key. |objReg| is clobbered. @@ -782,19 +781,19 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist RegisterID typeReg, RegisterID dataReg) { JS_ASSERT(objReg != typeReg); - FastArrayLoadFails fails; - fails.rangeCheck = guardArrayExtent(JSObject::offsetOfInitializedLength(), - objReg, key, BelowOrEqual); + RegisterID elementsReg = objReg; + loadPtr(Address(objReg, JSObject::offsetOfElements()), elementsReg); - RegisterID dslotsReg = objReg; - loadPtr(Address(objReg, JSObject::offsetOfSlots()), dslotsReg); + FastArrayLoadFails fails; + fails.rangeCheck = guardArrayExtent(ObjectElements::offsetOfInitializedLength(), + objReg, key, BelowOrEqual); // Load the slot out of the array. 
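    // Note on the rewritten fast path: elementsReg was loaded above from
    // JSObject::offsetOfElements(), so slot addresses are now computed relative
    // to the elements storage rather than the object's old slots array, and the
    // range check reads the initialized length out of the ObjectElements header.
    // The two branches below differ only in addressing mode: a fixed byte offset
    // for a constant key versus a scaled BaseIndex for a key held in a register.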
if (key.isConstant()) { - Address slot(objReg, key.index() * sizeof(Value)); + Address slot(elementsReg, key.index() * sizeof(Value)); fails.holeCheck = fastArrayLoadSlot(slot, true, typeReg, dataReg); } else { - BaseIndex slot(objReg, key.reg(), JSVAL_SCALE); + BaseIndex slot(elementsReg, key.reg(), JSVAL_SCALE); fails.holeCheck = fastArrayLoadSlot(slot, true, typeReg, dataReg); } @@ -829,16 +828,27 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist addPtr(JSFrameReg, reg); } - void loadObjClass(RegisterID objReg, RegisterID destReg) { - loadPtr(Address(objReg, JSObject::offsetOfClassPointer()), destReg); + void loadBaseShape(RegisterID obj, RegisterID dest) { + loadPtr(Address(obj, JSObject::offsetOfShape()), dest); + loadPtr(Address(dest, Shape::offsetOfBase()), dest); + } + + void loadObjClass(RegisterID obj, RegisterID dest) { + loadBaseShape(obj, dest); + loadPtr(Address(dest, BaseShape::offsetOfClass()), dest); } Jump testClass(Condition cond, RegisterID claspReg, js::Class *clasp) { return branchPtr(cond, claspReg, ImmPtr(clasp)); } - Jump testObjClass(Condition cond, RegisterID objReg, js::Class *clasp) { - return branchPtr(cond, Address(objReg, JSObject::offsetOfClassPointer()), ImmPtr(clasp)); + Jump testObjClass(Condition cond, RegisterID obj, RegisterID temp, js::Class *clasp) { + loadBaseShape(obj, temp); + return branchPtr(cond, Address(temp, BaseShape::offsetOfClass()), ImmPtr(clasp)); + } + + Jump testFunction(Condition cond, RegisterID fun, RegisterID temp) { + return testObjClass(cond, fun, temp, &js::FunctionClass); } void branchValue(Condition cond, RegisterID reg, int32 value, RegisterID result) @@ -886,13 +896,10 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist const js::Shape *shape, RegisterID typeReg, RegisterID dataReg) { - JS_ASSERT(shape->hasSlot()); - if (shape->isMethod()) - loadValueAsComponents(ObjectValue(shape->methodObject()), typeReg, dataReg); - else if (obj->isFixedSlot(shape->slot)) - loadInlineSlot(objReg, shape->slot, typeReg, dataReg); + if (obj->isFixedSlot(shape->slot())) + loadInlineSlot(objReg, shape->slot(), typeReg, dataReg); else - loadDynamicSlot(objReg, obj->dynamicSlotIndex(shape->slot), typeReg, dataReg); + loadDynamicSlot(objReg, obj->dynamicSlotIndex(shape->slot()), typeReg, dataReg); } #ifdef JS_METHODJIT_TYPED_ARRAY @@ -1213,10 +1220,6 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist loadPayload(address, reg); - Jump notSingleton = branchTest32(Assembler::Zero, - Address(reg, offsetof(JSObject, flags)), - Imm32(JSObject::SINGLETON_TYPE)); - for (unsigned i = 0; i < count; i++) { if (JSObject *object = types->getSingleObject(i)) { if (!matches.append(branchPtr(Assembler::Equal, reg, ImmPtr(object)))) @@ -1224,11 +1227,6 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist } } - if (!mismatches->append(jump())) - return false; - - notSingleton.linkTo(label(), this); - loadPtr(Address(reg, JSObject::offsetOfType()), reg); for (unsigned i = 0; i < count; i++) { @@ -1261,7 +1259,8 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist int thingSize = (int)gc::Arena::thingSize(allocKind); JS_ASSERT(cx->typeInferenceEnabled()); - JS_ASSERT(!templateObject->hasSlotsArray()); + JS_ASSERT(!templateObject->hasDynamicSlots()); + JS_ASSERT(!templateObject->hasDynamicElements()); #ifdef JS_GC_ZEAL if (cx->runtime->needZealousGC()) @@ -1291,43 +1290,52 @@ static const 
JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist * pinned against GC either by the script or by some type object. */ + int elementsOffset = JSObject::offsetOfFixedElements(); + /* - * Write out the slots pointer before readjusting the result register, + * Write out the elements pointer before readjusting the result register, * as for dense arrays we will need to get the address of the fixed - * slots first. + * elements first. */ if (templateObject->isDenseArray()) { - JS_ASSERT(!templateObject->initializedLength()); - addPtr(Imm32(-thingSize + sizeof(JSObject)), result); - storePtr(result, Address(result, -(int)sizeof(JSObject) + JSObject::offsetOfSlots())); - addPtr(Imm32(-(int)sizeof(JSObject)), result); + JS_ASSERT(!templateObject->getDenseArrayInitializedLength()); + addPtr(Imm32(-thingSize + elementsOffset), result); + storePtr(result, Address(result, -elementsOffset + JSObject::offsetOfElements())); + addPtr(Imm32(-elementsOffset), result); } else { - JS_ASSERT(!templateObject->newType); addPtr(Imm32(-thingSize), result); - storePtr(ImmPtr(NULL), Address(result, JSObject::offsetOfSlots())); + storePtr(ImmPtr(emptyObjectElements), Address(result, JSObject::offsetOfElements())); } - storePtr(ImmPtr(templateObject->lastProp), Address(result, offsetof(JSObject, lastProp))); - storePtr(ImmPtr(templateObject->getClass()), Address(result, JSObject::offsetOfClassPointer())); - store32(Imm32(templateObject->flags), Address(result, offsetof(JSObject, flags))); - store32(Imm32(templateObject->objShape), Address(result, offsetof(JSObject, objShape))); - storePtr(ImmPtr(templateObject->newType), Address(result, offsetof(JSObject, newType))); - storePtr(ImmPtr(templateObject->parent), Address(result, offsetof(JSObject, parent))); - storePtr(ImmPtr(templateObject->privateData), Address(result, offsetof(JSObject, privateData))); - storePtr(ImmPtr((void *) templateObject->capacity), Address(result, offsetof(JSObject, capacity))); + storePtr(ImmPtr(templateObject->lastProperty()), Address(result, JSObject::offsetOfShape())); storePtr(ImmPtr(templateObject->type()), Address(result, JSObject::offsetOfType())); + storePtr(ImmPtr(NULL), Address(result, JSObject::offsetOfSlots())); - /* - * Fixed slots of non-array objects are required to be initialized; - * Use the values currently in the template object. - */ - if (!templateObject->isDenseArray()) { - for (unsigned i = 0; i < templateObject->numFixedSlots(); i++) { + if (templateObject->isDenseArray()) { + /* Fill in the elements header. */ + store32(Imm32(templateObject->getDenseArrayCapacity()), + Address(result, elementsOffset + ObjectElements::offsetOfCapacity())); + store32(Imm32(templateObject->getDenseArrayInitializedLength()), + Address(result, elementsOffset + ObjectElements::offsetOfInitializedLength())); + store32(Imm32(templateObject->getArrayLength()), + Address(result, elementsOffset + ObjectElements::offsetOfLength())); + } else { + /* + * Fixed slots of non-array objects are required to be initialized; + * Use the values currently in the template object. 
+ */ + for (unsigned i = 0; i < templateObject->slotSpan(); i++) { storeValue(templateObject->getFixedSlot(i), Address(result, JSObject::getFixedSlotOffset(i))); } } + if (templateObject->hasPrivate()) { + uint32 nfixed = templateObject->numFixedSlots(); + storePtr(ImmPtr(templateObject->getPrivate()), + Address(result, JSObject::getPrivateDataOffset(nfixed))); + } + return jump; } diff --git a/js/src/methodjit/BaseCompiler.h b/js/src/methodjit/BaseCompiler.h index b652cf6a9572..2395d78469cb 100644 --- a/js/src/methodjit/BaseCompiler.h +++ b/js/src/methodjit/BaseCompiler.h @@ -75,6 +75,7 @@ struct MacroAssemblerTypedefs { typedef JSC::RepatchBuffer RepatchBuffer; typedef JSC::CodeLocationLabel CodeLocationLabel; typedef JSC::CodeLocationDataLabel32 CodeLocationDataLabel32; + typedef JSC::CodeLocationDataLabelPtr CodeLocationDataLabelPtr; typedef JSC::CodeLocationJump CodeLocationJump; typedef JSC::CodeLocationCall CodeLocationCall; typedef JSC::CodeLocationInstruction CodeLocationInstruction; diff --git a/js/src/methodjit/Compiler.cpp b/js/src/methodjit/Compiler.cpp index 7927c5e04b18..a88c7f609e23 100644 --- a/js/src/methodjit/Compiler.cpp +++ b/js/src/methodjit/Compiler.cpp @@ -165,7 +165,7 @@ mjit::Compiler::compile() : (*jit)->invokeEntry; } else if (status != Compile_Retry) { *checkAddr = JS_UNJITTABLE_SCRIPT; - if (outerScript->hasFunction) { + if (outerScript->function()) { outerScript->uninlineable = true; types::MarkTypeObjectFlags(cx, outerScript->function(), types::OBJECT_FLAG_UNINLINEABLE); @@ -183,7 +183,7 @@ mjit::Compiler::checkAnalysis(JSScript *script) return Compile_Abort; } - if (!script->ensureRanAnalysis(cx)) + if (!script->ensureRanAnalysis(cx, NULL)) return Compile_Error; if (cx->typeInferenceEnabled() && !script->ensureRanInference(cx)) return Compile_Error; @@ -232,8 +232,8 @@ mjit::Compiler::scanInlineCalls(uint32 index, uint32 depth) /* Don't inline from functions which could have a non-global scope object. */ if (!script->hasGlobal() || script->global() != globalObj || - (script->hasFunction && script->function()->getParent() != globalObj) || - (script->hasFunction && script->function()->isHeavyweight()) || + (script->function() && script->function()->getParent() != globalObj) || + (script->function() && script->function()->isHeavyweight()) || script->isActiveEval) { return Compile_Okay; } @@ -298,7 +298,7 @@ mjit::Compiler::scanInlineCalls(uint32 index, uint32 depth) break; } - JSFunction *fun = obj->getFunctionPrivate(); + JSFunction *fun = obj->toFunction(); if (!fun->isInterpreted()) { okay = false; break; @@ -390,7 +390,7 @@ mjit::Compiler::scanInlineCalls(uint32 index, uint32 depth) if (!obj) continue; - JSFunction *fun = obj->getFunctionPrivate(); + JSFunction *fun = obj->toFunction(); JSScript *script = fun->script(); CompileStatus status = addInlineFrame(script, nextDepth, index, pc); @@ -431,7 +431,7 @@ mjit::Compiler::pushActiveFrame(JSScript *script, uint32 argc) #ifdef JS_METHODJIT_SPEW if (cx->typeInferenceEnabled() && IsJaegerSpewChannelActive(JSpew_Regalloc)) { - unsigned nargs = script->hasFunction ? script->function()->nargs : 0; + unsigned nargs = script->function() ? script->function()->nargs : 0; for (unsigned i = 0; i < nargs; i++) { uint32 slot = ArgSlot(i); if (!newAnalysis->slotEscapes(slot)) { @@ -667,7 +667,7 @@ mjit::Compiler::generatePrologue() * If there is no function, then this can only be called via JaegerShot(), * which expects an existing frame to be initialized like the interpreter. 
*/ - if (script->hasFunction) { + if (script->function()) { Jump j = masm.jump(); /* @@ -679,7 +679,8 @@ mjit::Compiler::generatePrologue() Label fastPath = masm.label(); /* Store this early on so slow paths can access it. */ - masm.storePtr(ImmPtr(script->function()), Address(JSFrameReg, StackFrame::offsetOfExec())); + masm.storePtr(ImmPtr(script->function()), + Address(JSFrameReg, StackFrame::offsetOfExec())); { /* @@ -712,7 +713,8 @@ mjit::Compiler::generatePrologue() this->argsCheckStub = stubcc.masm.label(); this->argsCheckJump.linkTo(this->argsCheckStub, &stubcc.masm); #endif - stubcc.masm.storePtr(ImmPtr(script->function()), Address(JSFrameReg, StackFrame::offsetOfExec())); + stubcc.masm.storePtr(ImmPtr(script->function()), + Address(JSFrameReg, StackFrame::offsetOfExec())); OOL_STUBCALL(stubs::CheckArgumentTypes, REJOIN_CHECK_ARGUMENTS); #ifdef JS_MONOIC this->argsCheckFallthrough = stubcc.masm.label(); @@ -769,7 +771,7 @@ mjit::Compiler::generatePrologue() Jump hasScope = masm.branchTest32(Assembler::NonZero, FrameFlagsAddress(), Imm32(StackFrame::HAS_SCOPECHAIN)); masm.loadPayload(Address(JSFrameReg, StackFrame::offsetOfCallee(script->function())), t0); - masm.loadPtr(Address(t0, offsetof(JSObject, parent)), t0); + masm.loadPtr(Address(t0, JSFunction::offsetOfEnvironment()), t0); masm.storePtr(t0, Address(JSFrameReg, StackFrame::offsetOfScopeChain())); hasScope.linkTo(masm.label(), &masm); } @@ -824,7 +826,7 @@ mjit::Compiler::generatePrologue() if (cx->typeInferenceEnabled()) { #ifdef DEBUG - if (script->hasFunction) { + if (script->function()) { prepareStubCall(Uses(0)); INLINE_STUBCALL(stubs::AssertArgumentTypes, REJOIN_NONE); } @@ -861,7 +863,7 @@ void mjit::Compiler::ensureDoubleArguments() { /* Convert integer arguments which were inferred as (int|double) to doubles. 
*/ - for (uint32 i = 0; script->hasFunction && i < script->function()->nargs; i++) { + for (uint32 i = 0; script->function() && i < script->function()->nargs; i++) { uint32 slot = ArgSlot(i); if (a->varTypes[slot].type == JSVAL_TYPE_DOUBLE && analysis->trackSlot(slot)) frame.ensureDouble(frame.getArg(i)); @@ -998,7 +1000,7 @@ mjit::Compiler::finishThisUp(JITScript **jitp) jit->code = JSC::MacroAssemblerCodeRef(result, execPool, masm.size() + stubcc.size()); jit->invokeEntry = result; jit->singleStepMode = script->stepModeEnabled(); - if (script->hasFunction) { + if (script->function()) { jit->arityCheckEntry = stubCode.locationOf(arityLabel).executableAddress(); jit->argsCheckEntry = stubCode.locationOf(argsCheckLabel).executableAddress(); jit->fastEntry = fullCode.locationOf(invokeLabel).executableAddress(); @@ -1103,7 +1105,7 @@ mjit::Compiler::finishThisUp(JITScript **jitp) #if defined JS_MONOIC JS_INIT_CLIST(&jit->callers); - if (script->hasFunction && cx->typeInferenceEnabled()) { + if (script->function() && cx->typeInferenceEnabled()) { jit->argsCheckStub = stubCode.locationOf(argsCheckStub); jit->argsCheckFallthrough = stubCode.locationOf(argsCheckFallthrough); jit->argsCheckJump = stubCode.locationOf(argsCheckJump); @@ -1213,7 +1215,6 @@ mjit::Compiler::finishThisUp(JITScript **jitp) jitCallICs[i].call = &jitCallSites[callICs[i].callIndex]; jitCallICs[i].frameSize = callICs[i].frameSize; jitCallICs[i].funObjReg = callICs[i].funObjReg; - jitCallICs[i].funPtrReg = callICs[i].funPtrReg; stubCode.patch(callICs[i].addrLabel1, &jitCallICs[i]); stubCode.patch(callICs[i].addrLabel2, &jitCallICs[i]); } @@ -1278,10 +1279,10 @@ mjit::Compiler::finishThisUp(JITScript **jitp) to.inlineTypeGuard = inlineTypeGuard; JS_ASSERT(to.inlineTypeGuard == inlineTypeGuard); } - int inlineClaspGuard = fullCode.locationOf(from.claspGuard) - + int inlineShapeGuard = fullCode.locationOf(from.shapeGuard) - fullCode.locationOf(from.fastPathStart); - to.inlineClaspGuard = inlineClaspGuard; - JS_ASSERT(to.inlineClaspGuard == inlineClaspGuard); + to.inlineShapeGuard = inlineShapeGuard; + JS_ASSERT(to.inlineShapeGuard == inlineShapeGuard); stubCode.patch(from.paramAddr, &to); } @@ -1308,10 +1309,10 @@ mjit::Compiler::finishThisUp(JITScript **jitp) else to.keyReg = from.key.reg(); - int inlineClaspGuard = fullCode.locationOf(from.claspGuard) - + int inlineShapeGuard = fullCode.locationOf(from.shapeGuard) - fullCode.locationOf(from.fastPathStart); - to.inlineClaspGuard = inlineClaspGuard; - JS_ASSERT(to.inlineClaspGuard == inlineClaspGuard); + to.inlineShapeGuard = inlineShapeGuard; + JS_ASSERT(to.inlineShapeGuard == inlineShapeGuard); int inlineHoleGuard = fullCode.locationOf(from.holeGuard) - fullCode.locationOf(from.fastPathStart); @@ -1391,7 +1392,7 @@ mjit::Compiler::finishThisUp(JITScript **jitp) JSC::ExecutableAllocator::makeExecutable(result, masm.size() + stubcc.size()); JSC::ExecutableAllocator::cacheFlush(result, masm.size() + stubcc.size()); - Probes::registerMJITCode(cx, jit, script, script->hasFunction ? script->function() : NULL, + Probes::registerMJITCode(cx, jit, script, script->function() ? 
script->function() : NULL, (mjit::Compiler_ActiveFrame**) inlineFrames.begin(), result, masm.size(), result + masm.size(), stubcc.size()); @@ -2677,7 +2678,7 @@ mjit::Compiler::generateMethod() frame.pop(); // obj->getFlatClosureUpvars() - Address upvarAddress(reg, JSObject::getFlatClosureUpvarsOffset()); + Address upvarAddress(reg, JSFunction::getFlatClosureUpvarsOffset()); masm.loadPrivate(upvarAddress, reg); // push ((Value *) reg)[index] @@ -2830,15 +2831,6 @@ mjit::Compiler::generateMethod() } END_CASE(JSOP_DEBUGGER) - BEGIN_CASE(JSOP_UNBRAND) - jsop_unbrand(); - END_CASE(JSOP_UNBRAND) - - BEGIN_CASE(JSOP_UNBRANDTHIS) - prepareStubCall(Uses(1)); - INLINE_STUBCALL(stubs::UnbrandThis, REJOIN_FALLTHROUGH); - END_CASE(JSOP_UNBRANDTHIS) - default: /* Sorry, this opcode isn't implemented yet. */ #ifdef JS_METHODJIT_SPEW @@ -3384,7 +3376,7 @@ mjit::Compiler::emitInlineReturnValue(FrameEntry *fe) void mjit::Compiler::emitReturn(FrameEntry *fe) { - JS_ASSERT_IF(!script->hasFunction, JSOp(*PC) == JSOP_STOP); + JS_ASSERT_IF(!script->function(), JSOp(*PC) == JSOP_STOP); /* Only the top of the stack can be returned. */ JS_ASSERT_IF(fe, fe == frame.peek(-1)); @@ -3440,7 +3432,7 @@ mjit::Compiler::emitReturn(FrameEntry *fe) * even on the entry frame. To avoid double-putting, EnterMethodJIT clears * out the entry frame's activation objects. */ - if (script->hasFunction) { + if (script->function()) { types::TypeScriptNesting *nesting = script->nesting(); if (script->function()->isHeavyweight() || (nesting && nesting->children)) { prepareStubCall(Uses(fe ? 1 : 0)); @@ -3639,16 +3631,25 @@ mjit::Compiler::checkCallApplySpeculation(uint32 callImmArgc, uint32 speculatedA { JS_ASSERT(IsLowerableFunCallOrApply(PC)); + RegisterID temp; + Registers tempRegs(Registers::AvailRegs); + if (origCalleeType.isSet()) + tempRegs.takeReg(origCalleeType.reg()); + tempRegs.takeReg(origCalleeData); + if (origThisType.isSet()) + tempRegs.takeReg(origThisType.reg()); + tempRegs.takeReg(origThisData); + temp = tempRegs.takeAnyReg().reg(); + /* * if (origCallee.isObject() && * origCallee.toObject().isFunction && - * origCallee.toObject().getFunctionPrivate() == js_fun_{call,apply}) + * origCallee.toObject().toFunction() == js_fun_{call,apply}) */ MaybeJump isObj; if (origCalleeType.isSet()) isObj = masm.testObject(Assembler::NotEqual, origCalleeType.reg()); - Jump isFun = masm.testFunction(Assembler::NotEqual, origCalleeData); - masm.loadObjPrivate(origCalleeData, origCalleeData); + Jump isFun = masm.testFunction(Assembler::NotEqual, origCalleeData, temp); Native native = *PC == JSOP_FUNCALL ? js_fun_call : js_fun_apply; Jump isNative = masm.branchPtr(Assembler::NotEqual, Address(origCalleeData, JSFunction::offsetOfNativeOrScript()), @@ -3887,8 +3888,8 @@ mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew, FrameSize notObjectJump = masm.testObject(Assembler::NotEqual, icCalleeType.reg()); /* - * For an optimized apply, keep icCalleeData and funPtrReg in a - * callee-saved registers for the subsequent ic::SplatApplyArgs call. + * For an optimized apply, keep icCalleeData in a callee-saved register for + * the subsequent ic::SplatApplyArgs call. 
*/ Registers tempRegs(Registers::AvailRegs); if (callIC.frameSize.isDynamic() && !Registers::isSaved(icCalleeData)) { @@ -3898,7 +3899,6 @@ mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew, FrameSize } else { tempRegs.takeReg(icCalleeData); } - RegisterID funPtrReg = tempRegs.takeAnyReg(Registers::SavedRegs).reg(); /* Reserve space just before initialization of funGuard. */ RESERVE_IC_SPACE(masm); @@ -3920,16 +3920,16 @@ mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew, FrameSize stubcc.linkExitDirect(j, stubcc.masm.label()); callIC.slowPathStart = stubcc.masm.label(); + RegisterID tmp = tempRegs.takeAnyReg().reg(); + /* * Test if the callee is even a function. If this doesn't match, we * take a _really_ slow path later. */ - Jump notFunction = stubcc.masm.testFunction(Assembler::NotEqual, icCalleeData); + Jump notFunction = stubcc.masm.testFunction(Assembler::NotEqual, icCalleeData, tmp); /* Test if the function is scripted. */ - RegisterID tmp = tempRegs.takeAnyReg().reg(); - stubcc.masm.loadObjPrivate(icCalleeData, funPtrReg); - stubcc.masm.load16(Address(funPtrReg, offsetof(JSFunction, flags)), tmp); + stubcc.masm.load16(Address(icCalleeData, offsetof(JSFunction, flags)), tmp); stubcc.masm.and32(Imm32(JSFUN_KINDMASK), tmp); Jump isNative = stubcc.masm.branch32(Assembler::Below, tmp, Imm32(JSFUN_INTERPRETED)); tempRegs.putReg(tmp); @@ -3967,7 +3967,6 @@ mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew, FrameSize } callIC.funObjReg = icCalleeData; - callIC.funPtrReg = funPtrReg; /* * The IC call either returns NULL, meaning call completed, or a @@ -4613,9 +4612,12 @@ mjit::Compiler::jsop_getprop(JSAtom *atom, JSValueType knownType, if (rejoin == REJOIN_GETTER) testPushedType(rejoin, -1); } + RegisterID result = frame.allocReg(); RegisterID reg = frame.tempRegForData(top); frame.pop(); - frame.pushWord(Address(reg, offsetof(JSObject, privateData)), JSVAL_TYPE_INT32); + masm.loadPtr(Address(reg, JSObject::offsetOfElements()), result); + masm.load32(Address(result, ObjectElements::offsetOfLength()), result); + frame.pushTypedPayload(JSVAL_TYPE_INT32, result); if (script->pcCounters) bumpPropCounter(PC, OpcodeCounts::PROP_DEFINITE); if (!isObject) @@ -4790,10 +4792,9 @@ mjit::Compiler::jsop_getprop(JSAtom *atom, JSValueType knownType, masm.loadShape(objReg, shapeReg); pic.shapeGuard = masm.label(); - DataLabel32 inlineShapeLabel; - Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg, - Imm32(int32(INVALID_SHAPE)), - inlineShapeLabel); + DataLabelPtr inlineShapeLabel; + Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, shapeReg, + inlineShapeLabel, ImmPtr(NULL)); Label inlineShapeJump = masm.label(); RESERVE_OOL_SPACE(stubcc.masm); @@ -4807,7 +4808,7 @@ mjit::Compiler::jsop_getprop(JSAtom *atom, JSValueType knownType, testPushedType(rejoin, -1); /* Load the base slot address. */ - Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, offsetof(JSObject, slots)), + Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, JSObject::offsetOfSlots()), objReg); /* Copy the slot value to the expression stack. 
*/ @@ -4827,11 +4828,7 @@ mjit::Compiler::jsop_getprop(JSAtom *atom, JSValueType knownType, labels.setValueLoad(masm, pic.fastPathRejoin, fastValueLoad); if (pic.hasTypeCheck) labels.setInlineTypeJump(masm, pic.fastPathStart, typeCheck); -#ifdef JS_CPU_X64 - labels.setInlineShapeJump(masm, inlineShapeLabel, inlineShapeJump); -#else labels.setInlineShapeJump(masm, pic.shapeGuard, inlineShapeJump); -#endif CHECK_IC_SPACE(); @@ -4915,10 +4912,9 @@ mjit::Compiler::jsop_callprop_generic(JSAtom *atom) masm.loadShape(objReg, shapeReg); pic.shapeGuard = masm.label(); - DataLabel32 inlineShapeLabel; - Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg, - Imm32(int32(INVALID_SHAPE)), - inlineShapeLabel); + DataLabelPtr inlineShapeLabel; + Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, shapeReg, + inlineShapeLabel, ImmPtr(NULL)); Label inlineShapeJump = masm.label(); /* Slow path. */ @@ -4932,7 +4928,7 @@ mjit::Compiler::jsop_callprop_generic(JSAtom *atom) testPushedType(REJOIN_FALLTHROUGH, -1); /* Load the base slot address. */ - Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, offsetof(JSObject, slots)), + Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, JSObject::offsetOfSlots()), objReg); /* Copy the slot value to the expression stack. */ @@ -4952,11 +4948,7 @@ mjit::Compiler::jsop_callprop_generic(JSAtom *atom) labels.setInlineShapeOffset(masm.differenceBetween(pic.shapeGuard, inlineShapeLabel)); labels.setValueLoad(masm, pic.fastPathRejoin, fastValueLoad); labels.setInlineTypeJump(masm, pic.fastPathStart, typeCheck); -#ifdef JS_CPU_X64 - labels.setInlineShapeJump(masm, inlineShapeLabel, inlineShapeJump); -#else labels.setInlineShapeJump(masm, pic.shapeGuard, inlineShapeJump); -#endif CHECK_IC_SPACE(); @@ -5063,10 +5055,9 @@ mjit::Compiler::jsop_callprop_obj(JSAtom *atom) masm.loadShape(objReg, shapeReg); pic.shapeGuard = masm.label(); - DataLabel32 inlineShapeLabel; - Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg, - Imm32(int32(INVALID_SHAPE)), - inlineShapeLabel); + DataLabelPtr inlineShapeLabel; + Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, shapeReg, + inlineShapeLabel, ImmPtr(NULL)); Label inlineShapeJump = masm.label(); /* Slow path. */ @@ -5080,7 +5071,7 @@ mjit::Compiler::jsop_callprop_obj(JSAtom *atom) testPushedType(REJOIN_FALLTHROUGH, -1); /* Load the base slot address. */ - Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, offsetof(JSObject, slots)), + Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, JSObject::offsetOfSlots()), objReg); /* Copy the slot value to the expression stack. 
*/ @@ -5115,11 +5106,7 @@ mjit::Compiler::jsop_callprop_obj(JSAtom *atom) labels.setDslotsLoadOffset(masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel)); labels.setInlineShapeOffset(masm.differenceBetween(pic.shapeGuard, inlineShapeLabel)); labels.setValueLoad(masm, pic.fastPathRejoin, fastValueLoad); -#ifdef JS_CPU_X64 - labels.setInlineShapeJump(masm, inlineShapeLabel, inlineShapeJump); -#else labels.setInlineShapeJump(masm, pic.shapeGuard, inlineShapeJump); -#endif stubcc.rejoin(Changes(2)); pics.append(pic); @@ -5164,7 +5151,7 @@ mjit::Compiler::testSingletonProperty(JSObject *obj, jsid id) if (shape->hasDefaultGetter()) { if (!shape->hasSlot()) return false; - if (holder->getSlot(shape->slot).isUndefined()) + if (holder->getSlot(shape->slot()).isUndefined()) return false; } else if (!shape->isMethod()) { return false; @@ -5625,10 +5612,9 @@ mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache, bool popGuaranteed /* Guard on shape. */ masm.loadShape(objReg, shapeReg); pic.shapeGuard = masm.label(); - DataLabel32 inlineShapeData; - Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg, - Imm32(int32(INVALID_SHAPE)), - inlineShapeData); + DataLabelPtr inlineShapeData; + Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, shapeReg, + inlineShapeData, ImmPtr(NULL)); Label afterInlineShapeJump = masm.label(); /* Slow path. */ @@ -5642,7 +5628,7 @@ mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache, bool popGuaranteed } /* Load dslots. */ - Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, offsetof(JSObject, slots)), + Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, JSObject::offsetOfSlots()), objReg); /* Store RHS into object slot. */ @@ -5667,8 +5653,8 @@ mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache, bool popGuaranteed SetPropLabels &labels = pic.setPropLabels(); labels.setInlineShapeData(masm, pic.shapeGuard, inlineShapeData); - labels.setDslotsLoad(masm, pic.fastPathRejoin, dslotsLoadLabel, vr); - labels.setInlineValueStore(masm, pic.fastPathRejoin, inlineValueStore, vr); + labels.setDslotsLoad(masm, pic.fastPathRejoin, dslotsLoadLabel); + labels.setInlineValueStore(masm, pic.fastPathRejoin, inlineValueStore); labels.setInlineShapeJump(masm, pic.shapeGuard, afterInlineShapeJump); pics.append(pic); @@ -5872,11 +5858,13 @@ mjit::Compiler::jsop_bindname(JSAtom *atom, bool usePropCache) RESERVE_IC_SPACE(masm); pic.fastPathStart = masm.label(); - Address parent(pic.objReg, offsetof(JSObject, parent)); masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg); + masm.loadPtr(Address(pic.objReg, JSObject::offsetOfShape()), pic.shapeReg); + masm.loadPtr(Address(pic.shapeReg, Shape::offsetOfBase()), pic.shapeReg); + Address parent(pic.shapeReg, BaseShape::offsetOfParent()); pic.shapeGuard = masm.label(); - Jump inlineJump = masm.branchPtr(Assembler::NotEqual, parent, ImmPtr(0)); + Jump inlineJump = masm.branchPtr(Assembler::NotEqual, parent, ImmPtr(NULL)); { RESERVE_OOL_SPACE(stubcc.masm); pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(0)); @@ -5976,7 +5964,7 @@ mjit::Compiler::jsop_this() * In direct-call eval code, we wrapped 'this' before entering the eval. * In global code, 'this' is always an object. 
*/ - if (script->hasFunction && !script->strictModeCode) { + if (script->function() && !script->strictModeCode) { FrameEntry *thisFe = frame.peek(-1); if (!thisFe->isType(JSVAL_TYPE_OBJECT)) { @@ -6056,7 +6044,7 @@ mjit::Compiler::iter(uintN flags) stubcc.linkExit(nullIterator, Uses(1)); /* Get NativeIterator from iter obj. */ - masm.loadObjPrivate(ioreg, nireg); + masm.loadObjPrivate(ioreg, nireg, JSObject::ITER_CLASS_NFIXED_SLOTS); /* Test for active iterator. */ Address flagsAddr(nireg, offsetof(NativeIterator, flags)); @@ -6068,8 +6056,8 @@ mjit::Compiler::iter(uintN flags) /* Compare shape of object with iterator. */ masm.loadShape(reg, T1); masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2); - masm.load32(Address(T2, 0), T2); - Jump mismatchedObject = masm.branch32(Assembler::NotEqual, T1, T2); + masm.loadPtr(Address(T2, 0), T2); + Jump mismatchedObject = masm.branchPtr(Assembler::NotEqual, T1, T2); stubcc.linkExit(mismatchedObject, Uses(1)); /* Compare shape of object's prototype with iterator. */ @@ -6077,8 +6065,8 @@ mjit::Compiler::iter(uintN flags) masm.loadPtr(Address(T1, offsetof(types::TypeObject, proto)), T1); masm.loadShape(T1, T1); masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2); - masm.load32(Address(T2, sizeof(uint32)), T2); - Jump mismatchedProto = masm.branch32(Assembler::NotEqual, T1, T2); + masm.loadPtr(Address(T2, sizeof(Shape *)), T2); + Jump mismatchedProto = masm.branchPtr(Assembler::NotEqual, T1, T2); stubcc.linkExit(mismatchedProto, Uses(1)); /* @@ -6153,11 +6141,11 @@ mjit::Compiler::iterNext(ptrdiff_t offset) frame.unpinReg(reg); /* Test clasp */ - Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, &IteratorClass); + Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, T1, &IteratorClass); stubcc.linkExit(notFast, Uses(1)); /* Get private from iter obj. */ - masm.loadObjPrivate(reg, T1); + masm.loadObjPrivate(reg, T1, JSObject::ITER_CLASS_NFIXED_SLOTS); RegisterID T3 = frame.allocReg(); RegisterID T4 = frame.allocReg(); @@ -6208,11 +6196,11 @@ mjit::Compiler::iterMore(jsbytecode *target) RegisterID tempreg = frame.allocReg(); /* Test clasp */ - Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, &IteratorClass); + Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, tempreg, &IteratorClass); stubcc.linkExitForBranch(notFast); /* Get private from iter obj. */ - masm.loadObjPrivate(reg, reg); + masm.loadObjPrivate(reg, reg, JSObject::ITER_CLASS_NFIXED_SLOTS); /* Test that the iterator supports fast iteration. */ notFast = masm.branchTest32(Assembler::NonZero, Address(reg, offsetof(NativeIterator, flags)), @@ -6247,11 +6235,11 @@ mjit::Compiler::iterEnd() frame.unpinReg(reg); /* Test clasp */ - Jump notIterator = masm.testObjClass(Assembler::NotEqual, reg, &IteratorClass); + Jump notIterator = masm.testObjClass(Assembler::NotEqual, reg, T1, &IteratorClass); stubcc.linkExit(notIterator, Uses(1)); /* Get private from iter obj. 
*/ - masm.loadObjPrivate(reg, T1); + masm.loadObjPrivate(reg, T1, JSObject::ITER_CLASS_NFIXED_SLOTS); RegisterID T2 = frame.allocReg(); @@ -6351,7 +6339,7 @@ mjit::Compiler::jsop_getgname(uint32 index) */ const js::Shape *shape = globalObj->nativeLookup(cx, ATOM_TO_JSID(atom)); if (shape && shape->hasDefaultGetterOrIsMethod() && shape->hasSlot()) { - HeapValue *value = &globalObj->getSlotRef(shape->slot); + HeapValue *value = &globalObj->getSlotRef(shape->slot()); if (!value->isUndefined() && !propertyTypes->isOwnProperty(cx, globalObj->getType(cx), true)) { watchGlobalReallocation(); @@ -6386,9 +6374,9 @@ mjit::Compiler::jsop_getgname(uint32 index) objReg = frame.allocReg(); - masm.load32FromImm(&obj->objShape, objReg); - shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, objReg, - Imm32(int32(INVALID_SHAPE)), ic.shape); + masm.loadPtrFromImm(obj->addressOfShape(), objReg); + shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, objReg, + ic.shape, ImmPtr(NULL)); masm.move(ImmPtr(obj), objReg); } else { objReg = frame.ownRegForData(fe); @@ -6396,8 +6384,8 @@ mjit::Compiler::jsop_getgname(uint32 index) RegisterID reg = frame.allocReg(); masm.loadShape(objReg, reg); - shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, reg, - Imm32(int32(INVALID_SHAPE)), ic.shape); + shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, reg, + ic.shape, ImmPtr(NULL)); frame.freeReg(reg); } stubcc.linkExit(shapeGuard, Uses(0)); @@ -6413,7 +6401,7 @@ mjit::Compiler::jsop_getgname(uint32 index) /* Garbage value. */ uint32 slot = 1 << 24; - masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg); + masm.loadPtr(Address(objReg, JSObject::offsetOfSlots()), objReg); Address address(objReg, slot); /* Allocate any register other than objReg. */ @@ -6437,7 +6425,6 @@ mjit::Compiler::jsop_getgname(uint32 index) getGlobalNames.append(ic); finishBarrier(barrier, REJOIN_GETTER, 0); - #else jsop_getgname_slow(index); #endif @@ -6508,6 +6495,7 @@ mjit::Compiler::jsop_callgname_epilogue() /* If the callee is not an object, jump to the inline fast path. */ MaybeRegisterID typeReg = frame.maybePinType(fval); RegisterID objReg = frame.copyDataIntoReg(fval); + RegisterID tempReg = frame.allocReg(); MaybeJump isNotObj; if (!fval->isType(JSVAL_TYPE_OBJECT)) { @@ -6518,17 +6506,18 @@ mjit::Compiler::jsop_callgname_epilogue() /* * If the callee is not a function, jump to OOL slow path. */ - Jump notFunction = masm.testFunction(Assembler::NotEqual, objReg); + Jump notFunction = masm.testFunction(Assembler::NotEqual, objReg, tempReg); stubcc.linkExit(notFunction, Uses(1)); /* * If the callee's parent is not equal to the global, jump to * OOL slow path. */ - masm.loadPtr(Address(objReg, offsetof(JSObject, parent)), objReg); + masm.loadPtr(Address(objReg, JSFunction::offsetOfEnvironment()), objReg); Jump globalMismatch = masm.branchPtr(Assembler::NotEqual, objReg, ImmPtr(globalObj)); stubcc.linkExit(globalMismatch, Uses(1)); frame.freeReg(objReg); + frame.freeReg(tempReg); /* OOL stub call path. */ stubcc.leave(); @@ -6581,7 +6570,7 @@ mjit::Compiler::jsop_setgname(JSAtom *atom, bool usePropertyCache, bool popGuara shape->writable() && shape->hasSlot() && !types->isOwnProperty(cx, globalObj->getType(cx), true)) { watchGlobalReallocation(); - HeapValue *value = &globalObj->getSlotRef(shape->slot); + HeapValue *value = &globalObj->getSlotRef(shape->slot()); RegisterID reg = frame.allocReg(); #ifdef JSGC_INCREMENTAL_MJ /* Write barrier. 
*/ @@ -6633,10 +6622,9 @@ mjit::Compiler::jsop_setgname(JSAtom *atom, bool usePropertyCache, bool popGuara ic.shapeReg = ic.objReg; ic.objConst = true; - masm.load32FromImm(&obj->objShape, ic.shapeReg); - shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, ic.shapeReg, - Imm32(int32(INVALID_SHAPE)), - ic.shape); + masm.loadPtrFromImm(obj->addressOfShape(), ic.shapeReg); + shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, ic.shapeReg, + ic.shape, ImmPtr(NULL)); masm.move(ImmPtr(obj), ic.objReg); } else { ic.objReg = frame.copyDataIntoReg(objFe); @@ -6644,9 +6632,8 @@ mjit::Compiler::jsop_setgname(JSAtom *atom, bool usePropertyCache, bool popGuara ic.objConst = false; masm.loadShape(ic.objReg, ic.shapeReg); - shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, ic.shapeReg, - Imm32(int32(INVALID_SHAPE)), - ic.shape); + shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, ic.shapeReg, + ic.shape, ImmPtr(NULL)); frame.freeReg(ic.shapeReg); } ic.shapeGuardJump = shapeGuard; @@ -6661,7 +6648,7 @@ mjit::Compiler::jsop_setgname(JSAtom *atom, bool usePropertyCache, bool popGuara ic.usePropertyCache = usePropertyCache; - masm.loadPtr(Address(ic.objReg, offsetof(JSObject, slots)), ic.objReg); + masm.loadPtr(Address(ic.objReg, JSObject::offsetOfSlots()), ic.objReg); Address address(ic.objReg, slot); if (ic.vr.isConstant()) { @@ -6705,13 +6692,6 @@ mjit::Compiler::jsop_getelem_slow() pushSyncedEntry(0); } -void -mjit::Compiler::jsop_unbrand() -{ - prepareStubCall(Uses(1)); - INLINE_STUBCALL(stubs::Unbrand, REJOIN_FALLTHROUGH); -} - bool mjit::Compiler::jsop_instanceof() { @@ -6734,13 +6714,20 @@ mjit::Compiler::jsop_instanceof() frame.forgetMismatchedObject(lhs); frame.forgetMismatchedObject(rhs); + RegisterID tmp = frame.allocReg(); RegisterID obj = frame.tempRegForData(rhs); - Jump notFunction = masm.testFunction(Assembler::NotEqual, obj); + + masm.loadBaseShape(obj, tmp); + Jump notFunction = masm.branchPtr(Assembler::NotEqual, + Address(tmp, BaseShape::offsetOfClass()), + ImmPtr(&FunctionClass)); + stubcc.linkExit(notFunction, Uses(2)); /* Test for bound functions. */ - Jump isBound = masm.branchTest32(Assembler::NonZero, Address(obj, offsetof(JSObject, flags)), - Imm32(JSObject::BOUND_FUNCTION)); + Jump isBound = masm.branchTest32(Assembler::NonZero, + Address(tmp, BaseShape::offsetOfFlags()), + Imm32(BaseShape::BOUND_FUNCTION)); { stubcc.linkExit(isBound, Uses(2)); stubcc.leave(); @@ -6748,6 +6735,7 @@ mjit::Compiler::jsop_instanceof() firstSlow = stubcc.masm.jump(); } + frame.freeReg(tmp); /* This is sadly necessary because the error case needs the object. 
*/ frame.dup(); @@ -6868,11 +6856,14 @@ mjit::Compiler::jsop_newinit() return false; } + size_t maxArraySlots = + gc::GetGCKindSlots(gc::FINALIZE_OBJECT_LAST) - ObjectElements::VALUES_PER_HEADER; + if (!cx->typeInferenceEnabled() || !globalObj || - (isArray && count >= gc::GetGCKindSlots(gc::FINALIZE_OBJECT_LAST)) || + (isArray && count > maxArraySlots) || (!isArray && !baseobj) || - (!isArray && baseobj->hasSlotsArray())) { + (!isArray && baseobj->hasDynamicSlots())) { prepareStubCall(Uses(0)); masm.storePtr(ImmPtr(type), FrameAddress(offsetof(VMFrame, scratch))); masm.move(ImmPtr(stubArg), Registers::ArgReg1); @@ -6959,7 +6950,7 @@ mjit::Compiler::jsop_regexp() if (JSOp(*use) == JSOP_CALLPROP) { JSObject *callee = analysis->pushedTypes(use, 0)->getSingleton(cx); if (callee && callee->isFunction()) { - Native native = callee->getFunctionPrivate()->maybeNative(); + Native native = callee->toFunction()->maybeNative(); if (native == js::regexp_exec || native == js::regexp_test) { frame.push(ObjectValue(*obj)); return true; @@ -6969,7 +6960,7 @@ mjit::Compiler::jsop_regexp() uint32 argc = GET_ARGC(use); JSObject *callee = analysis->poppedTypes(use, argc + 1)->getSingleton(cx); if (callee && callee->isFunction() && argc >= 1 && which == argc - 1) { - Native native = callee->getFunctionPrivate()->maybeNative(); + Native native = callee->toFunction()->maybeNative(); if (native == js::str_match || native == js::str_search || native == js::str_replace || @@ -7856,30 +7847,16 @@ mjit::Compiler::addTypeTest(types::TypeSet *types, RegisterID typeReg, RegisterI Jump notObject = masm.testObject(Assembler::NotEqual, typeReg); Address typeAddress(dataReg, JSObject::offsetOfType()); - /* - * Test for a singleton objects first. If singletons have lazy types - * then they may share their raw type pointer with another type object - * in the observed set and we can get a spurious match. 
- */ - Jump notSingleton = masm.branchTest32(Assembler::Zero, - Address(dataReg, offsetof(JSObject, flags)), - Imm32(JSObject::SINGLETON_TYPE)); - for (unsigned i = 0; i < count; i++) { if (JSObject *object = types->getSingleObject(i)) matches.append(masm.branchPtr(Assembler::Equal, dataReg, ImmPtr(object))); } - Jump singletonMismatch = masm.jump(); - - notSingleton.linkTo(masm.label(), &masm); - for (unsigned i = 0; i < count; i++) { if (types::TypeObject *object = types->getTypeObject(i)) matches.append(masm.branchPtr(Assembler::Equal, typeAddress, ImmPtr(object))); } - singletonMismatch.linkTo(masm.label(), &masm); notObject.linkTo(masm.label(), &masm); } diff --git a/js/src/methodjit/Compiler.h b/js/src/methodjit/Compiler.h index ab2e7650c3a4..7355695190ae 100644 --- a/js/src/methodjit/Compiler.h +++ b/js/src/methodjit/Compiler.h @@ -80,7 +80,7 @@ class Compiler : public BaseCompiler struct GlobalNameICInfo { Label fastPathStart; Call slowPathCall; - DataLabel32 shape; + DataLabelPtr shape; DataLabelPtr addrLabel; bool usePropertyCache; @@ -147,7 +147,6 @@ class Compiler : public BaseCompiler Jump oolJump; Label icCall; RegisterID funObjReg; - RegisterID funPtrReg; FrameSize frameSize; bool typeMonitored; }; @@ -200,7 +199,7 @@ class Compiler : public BaseCompiler RegisterID objReg; ValueRemat id; MaybeJump typeGuard; - Jump claspGuard; + Jump shapeGuard; }; struct SetElementICInfo : public BaseICInfo { @@ -210,7 +209,7 @@ class Compiler : public BaseCompiler StateRemat objRemat; ValueRemat vr; Jump capacityGuard; - Jump claspGuard; + Jump shapeGuard; Jump holeGuard; Int32Key key; uint32 volatileMask; @@ -233,7 +232,6 @@ class Compiler : public BaseCompiler bool typeMonitored; types::TypeSet *rhsTypes; ValueRemat vr; -#ifdef JS_HAS_IC_LABELS union { ic::GetPropLabels getPropLabels_; ic::SetPropLabels setPropLabels_; @@ -258,25 +256,6 @@ class Compiler : public BaseCompiler kind == ic::PICInfo::XNAME); return scopeNameLabels_; } -#else - ic::GetPropLabels &getPropLabels() { - JS_ASSERT(kind == ic::PICInfo::GET || kind == ic::PICInfo::CALL); - return ic::PICInfo::getPropLabels_; - } - ic::SetPropLabels &setPropLabels() { - JS_ASSERT(kind == ic::PICInfo::SET || kind == ic::PICInfo::SETMETHOD); - return ic::PICInfo::setPropLabels_; - } - ic::BindNameLabels &bindNameLabels() { - JS_ASSERT(kind == ic::PICInfo::BIND); - return ic::PICInfo::bindNameLabels_; - } - ic::ScopeNameLabels &scopeNameLabels() { - JS_ASSERT(kind == ic::PICInfo::NAME || kind == ic::PICInfo::CALLNAME || - kind == ic::PICInfo::XNAME); - return ic::PICInfo::scopeNameLabels_; - } -#endif void copySimpleMembersTo(ic::PICInfo &ic) { ic.kind = kind; @@ -292,7 +271,6 @@ class Compiler : public BaseCompiler } ic.typeMonitored = typeMonitored; ic.rhsTypes = rhsTypes; -#ifdef JS_HAS_IC_LABELS if (ic.isGet()) ic.setLabels(getPropLabels()); else if (ic.isSet()) @@ -301,7 +279,6 @@ class Compiler : public BaseCompiler ic.setLabels(bindNameLabels()); else if (ic.isScopeName()) ic.setLabels(scopeNameLabels()); -#endif } }; @@ -647,7 +624,6 @@ private: void jsop_setelem_slow(); void jsop_getelem_slow(); void jsop_callelem_slow(); - void jsop_unbrand(); bool jsop_getprop(JSAtom *atom, JSValueType type, bool typeCheck = true, bool usePropCache = true); bool jsop_setprop(JSAtom *atom, bool usePropCache, bool popGuaranteed); diff --git a/js/src/methodjit/FastBuiltins.cpp b/js/src/methodjit/FastBuiltins.cpp index db248c1bfc14..61ad5cc7bf01 100644 --- a/js/src/methodjit/FastBuiltins.cpp +++ b/js/src/methodjit/FastBuiltins.cpp @@ -457,30 
+457,30 @@ mjit::Compiler::compileArrayPush(FrameEntry *thisValue, FrameEntry *arg) frame.pinReg(objReg); RegisterID slotsReg = frame.allocReg(); + masm.loadPtr(Address(objReg, JSObject::offsetOfElements()), slotsReg); RegisterID lengthReg = frame.allocReg(); - masm.load32(Address(objReg, offsetof(JSObject, privateData)), lengthReg); + masm.load32(Address(slotsReg, ObjectElements::offsetOfLength()), lengthReg); frame.unpinReg(objReg); Int32Key key = Int32Key::FromRegister(lengthReg); /* Test for 'length == initializedLength' */ - Jump initlenGuard = masm.guardArrayExtent(JSObject::offsetOfInitializedLength(), - objReg, key, Assembler::NotEqual); + Jump initlenGuard = masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(), + slotsReg, key, Assembler::NotEqual); stubcc.linkExit(initlenGuard, Uses(3)); /* Test for 'length < capacity' */ - Jump capacityGuard = masm.guardArrayExtent(offsetof(JSObject, capacity), - objReg, key, Assembler::BelowOrEqual); + Jump capacityGuard = masm.guardArrayExtent(ObjectElements::offsetOfCapacity(), + slotsReg, key, Assembler::BelowOrEqual); stubcc.linkExit(capacityGuard, Uses(3)); - masm.loadPtr(Address(objReg, JSObject::offsetOfSlots()), slotsReg); masm.storeValue(vr, BaseIndex(slotsReg, lengthReg, masm.JSVAL_SCALE)); masm.bumpKey(key, 1); - masm.store32(lengthReg, Address(objReg, offsetof(JSObject, privateData))); - masm.store32(lengthReg, Address(objReg, JSObject::offsetOfInitializedLength())); + masm.store32(lengthReg, Address(slotsReg, ObjectElements::offsetOfLength())); + masm.store32(lengthReg, Address(slotsReg, ObjectElements::offsetOfInitializedLength())); stubcc.leave(); stubcc.masm.move(Imm32(1), Registers::ArgReg1); @@ -513,13 +513,12 @@ mjit::Compiler::compileArrayPopShift(FrameEntry *thisValue, bool isPacked, bool frame.pinReg(objReg); RegisterID lengthReg = frame.allocReg(); - masm.load32(Address(objReg, offsetof(JSObject, privateData)), lengthReg); + RegisterID slotsReg = frame.allocReg(); JSValueType type = knownPushedType(0); - MaybeRegisterID slotsReg, dataReg, typeReg; + MaybeRegisterID dataReg, typeReg; if (!analysis->popGuaranteed(PC)) { - slotsReg = frame.allocReg(); dataReg = frame.allocReg(); if (type == JSVAL_TYPE_UNKNOWN || type == JSVAL_TYPE_DOUBLE) typeReg = frame.allocReg(); @@ -536,10 +535,13 @@ mjit::Compiler::compileArrayPopShift(FrameEntry *thisValue, bool isPacked, bool frame.unpinKilledReg(objReg); } + masm.loadPtr(Address(objReg, JSObject::offsetOfElements()), slotsReg); + masm.load32(Address(slotsReg, ObjectElements::offsetOfLength()), lengthReg); + /* Test for 'length == initializedLength' */ Int32Key key = Int32Key::FromRegister(lengthReg); - Jump initlenGuard = masm.guardArrayExtent(JSObject::offsetOfInitializedLength(), - objReg, key, Assembler::NotEqual); + Jump initlenGuard = masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(), + slotsReg, key, Assembler::NotEqual); stubcc.linkExit(initlenGuard, Uses(3)); /* @@ -555,13 +557,12 @@ mjit::Compiler::compileArrayPopShift(FrameEntry *thisValue, bool isPacked, bool masm.bumpKey(key, -1); if (dataReg.isSet()) { - masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), slotsReg.reg()); Jump holeCheck; if (isArrayPop) { - BaseIndex slot(slotsReg.reg(), lengthReg, masm.JSVAL_SCALE); + BaseIndex slot(slotsReg, lengthReg, masm.JSVAL_SCALE); holeCheck = masm.fastArrayLoadSlot(slot, !isPacked, typeReg, dataReg.reg()); } else { - holeCheck = masm.fastArrayLoadSlot(Address(slotsReg.reg()), !isPacked, typeReg, dataReg.reg()); + holeCheck = 
masm.fastArrayLoadSlot(Address(slotsReg), !isPacked, typeReg, dataReg.reg()); Address addr = frame.addressOf(frame.peek(-2)); if (typeReg.isSet()) masm.storeValueFromComponents(typeReg.reg(), dataReg.reg(), addr); @@ -570,11 +571,10 @@ mjit::Compiler::compileArrayPopShift(FrameEntry *thisValue, bool isPacked, bool } if (!isPacked) stubcc.linkExit(holeCheck, Uses(3)); - frame.freeReg(slotsReg.reg()); } - masm.store32(lengthReg, Address(objReg, offsetof(JSObject, privateData))); - masm.store32(lengthReg, Address(objReg, JSObject::offsetOfInitializedLength())); + masm.store32(lengthReg, Address(slotsReg, ObjectElements::offsetOfLength())); + masm.store32(lengthReg, Address(slotsReg, ObjectElements::offsetOfInitializedLength())); if (!isArrayPop) INLINE_STUBCALL(stubs::ArrayShift, REJOIN_NONE); @@ -583,6 +583,7 @@ mjit::Compiler::compileArrayPopShift(FrameEntry *thisValue, bool isPacked, bool stubcc.masm.move(Imm32(0), Registers::ArgReg1); OOL_STUBCALL(stubs::SlowCall, REJOIN_FALLTHROUGH); + frame.freeReg(slotsReg); frame.freeReg(lengthReg); frame.popn(2); @@ -668,22 +669,27 @@ mjit::Compiler::compileArrayConcat(types::TypeSet *thisTypes, types::TypeSet *ar /* Test for 'length == initializedLength' on both arrays. */ + RegisterID slotsReg = frame.allocReg(); RegisterID reg = frame.allocReg(); + Int32Key key = Int32Key::FromRegister(reg); RegisterID objReg = frame.tempRegForData(thisValue); - masm.load32(Address(objReg, offsetof(JSObject, privateData)), reg); - Jump initlenOneGuard = masm.guardArrayExtent(JSObject::offsetOfInitializedLength(), - objReg, key, Assembler::NotEqual); + masm.loadPtr(Address(objReg, JSObject::offsetOfElements()), slotsReg); + masm.load32(Address(slotsReg, ObjectElements::offsetOfLength()), reg); + Jump initlenOneGuard = masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(), + slotsReg, key, Assembler::NotEqual); stubcc.linkExit(initlenOneGuard, Uses(3)); objReg = frame.tempRegForData(argValue); - masm.load32(Address(objReg, offsetof(JSObject, privateData)), reg); - Jump initlenTwoGuard = masm.guardArrayExtent(JSObject::offsetOfInitializedLength(), - objReg, key, Assembler::NotEqual); + masm.loadPtr(Address(objReg, JSObject::offsetOfElements()), slotsReg); + masm.load32(Address(slotsReg, ObjectElements::offsetOfLength()), reg); + Jump initlenTwoGuard = masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(), + slotsReg, key, Assembler::NotEqual); stubcc.linkExit(initlenTwoGuard, Uses(3)); frame.freeReg(reg); + frame.freeReg(slotsReg); frame.syncAndForgetEverything(); /* @@ -766,7 +772,10 @@ mjit::Compiler::compileArrayWithArgs(uint32 argc) */ JS_ASSERT(argc >= 2); - if (argc >= gc::GetGCKindSlots(gc::FINALIZE_OBJECT_LAST)) + size_t maxArraySlots = + gc::GetGCKindSlots(gc::FINALIZE_OBJECT_LAST) - ObjectElements::VALUES_PER_HEADER; + + if (argc > maxArraySlots) return Compile_InlineAbort; types::TypeObject *type = types::TypeScript::InitObject(cx, script, PC, JSProto_Array); @@ -784,14 +793,16 @@ mjit::Compiler::compileArrayWithArgs(uint32 argc) Jump emptyFreeList = masm.getNewObject(cx, result, templateObject); stubcc.linkExit(emptyFreeList, Uses(0)); + int offset = JSObject::offsetOfFixedElements(); + masm.store32(Imm32(argc), + Address(result, offset + ObjectElements::offsetOfInitializedLength())); + for (unsigned i = 0; i < argc; i++) { FrameEntry *arg = frame.peek(-(int)argc + i); - frame.storeTo(arg, Address(result, JSObject::getFixedSlotOffset(i)), /* popped = */ true); + frame.storeTo(arg, Address(result, offset), /* popped = */ true); + 
offset += sizeof(Value); } - masm.storePtr(ImmIntPtr(intptr_t(argc)), - Address(result, JSObject::offsetOfInitializedLength())); - stubcc.leave(); stubcc.masm.move(Imm32(argc), Registers::ArgReg1); @@ -831,8 +842,7 @@ mjit::Compiler::inlineNativeFunction(uint32 argc, bool callingNew) if (!globalObj || globalObj != callee->getGlobal()) return Compile_InlineAbort; - JSFunction *fun = callee->getFunctionPrivate(); - Native native = fun->maybeNative(); + Native native = callee->toFunction()->maybeNative(); if (!native) return Compile_InlineAbort; diff --git a/js/src/methodjit/FastOps.cpp b/js/src/methodjit/FastOps.cpp index 12a34d41a159..c81fff93751a 100644 --- a/js/src/methodjit/FastOps.cpp +++ b/js/src/methodjit/FastOps.cpp @@ -1105,17 +1105,17 @@ mjit::Compiler::jsop_setelem_dense() if (pinKey) frame.unpinReg(key.reg()); } else { - // Get a register for the object which we can clobber. - RegisterID objReg; + // Get a register for the object which we can clobber, and load its elements. if (frame.haveSameBacking(obj, value)) { - objReg = frame.allocReg(); - masm.move(vr.dataReg(), objReg); + slotsReg = frame.allocReg(); + masm.move(vr.dataReg(), slotsReg); } else if (frame.haveSameBacking(obj, id)) { - objReg = frame.allocReg(); - masm.move(key.reg(), objReg); + slotsReg = frame.allocReg(); + masm.move(key.reg(), slotsReg); } else { - objReg = frame.copyDataIntoReg(obj); + slotsReg = frame.copyDataIntoReg(obj); } + masm.loadPtr(Address(slotsReg, JSObject::offsetOfElements()), slotsReg); frame.unpinEntry(vr); if (pinKey) @@ -1124,19 +1124,19 @@ mjit::Compiler::jsop_setelem_dense() // Make an OOL path for setting exactly the initialized length. Label syncTarget = stubcc.syncExitAndJump(Uses(3)); - Jump initlenGuard = masm.guardArrayExtent(JSObject::offsetOfInitializedLength(), - objReg, key, Assembler::BelowOrEqual); + Jump initlenGuard = masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(), + slotsReg, key, Assembler::BelowOrEqual); stubcc.linkExitDirect(initlenGuard, stubcc.masm.label()); // Recheck for an exact initialized length. :TODO: would be nice to // reuse the condition bits from the previous test. - Jump exactlenGuard = stubcc.masm.guardArrayExtent(JSObject::offsetOfInitializedLength(), - objReg, key, Assembler::NotEqual); + Jump exactlenGuard = stubcc.masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(), + slotsReg, key, Assembler::NotEqual); exactlenGuard.linkTo(syncTarget, &stubcc.masm); // Check array capacity. - Jump capacityGuard = stubcc.masm.guardArrayExtent(offsetof(JSObject, capacity), - objReg, key, Assembler::BelowOrEqual); + Jump capacityGuard = stubcc.masm.guardArrayExtent(ObjectElements::offsetOfCapacity(), + slotsReg, key, Assembler::BelowOrEqual); capacityGuard.linkTo(syncTarget, &stubcc.masm); // Bump the index for setting the array length. The above guard @@ -1144,23 +1144,18 @@ mjit::Compiler::jsop_setelem_dense() stubcc.masm.bumpKey(key, 1); // Update the initialized length. - stubcc.masm.storeKey(key, Address(objReg, JSObject::offsetOfInitializedLength())); + stubcc.masm.storeKey(key, Address(slotsReg, ObjectElements::offsetOfInitializedLength())); // Update the array length if needed. 
-        Jump lengthGuard = stubcc.masm.guardArrayExtent(offsetof(JSObject, privateData),
-                                                        objReg, key, Assembler::AboveOrEqual);
-        stubcc.masm.storeKey(key, Address(objReg, offsetof(JSObject, privateData)));
+        Jump lengthGuard = stubcc.masm.guardArrayExtent(ObjectElements::offsetOfLength(),
+                                                        slotsReg, key, Assembler::AboveOrEqual);
+        stubcc.masm.storeKey(key, Address(slotsReg, ObjectElements::offsetOfLength()));
         lengthGuard.linkTo(stubcc.masm.label(), &stubcc.masm);
         // Restore the index.
         stubcc.masm.bumpKey(key, -1);
-        stubcc.masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
-        initlenExit = stubcc.masm.jump();
-
-        masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
-        slotsReg = objReg;
     }
 #ifdef JSGC_INCREMENTAL_MJ
@@ -1492,6 +1487,20 @@ mjit::Compiler::tryConvertInteger(FrameEntry *fe, Uses uses)
     frame.learnType(fe, JSVAL_TYPE_INT32, reg);
 }
+/* Get the common shape used by all dense arrays with a prototype at globalObj. */
+static inline Shape *
+GetDenseArrayShape(JSContext *cx, JSObject *globalObj)
+{
+    JS_ASSERT(globalObj);
+
+    JSObject *proto;
+    if (!js_GetClassPrototype(cx, globalObj, JSProto_Array, &proto, NULL))
+        return NULL;
+
+    return EmptyShape::getInitialShape(cx, &ArrayClass, proto,
+                                       proto->getParent(), gc::FINALIZE_OBJECT0);
+}
+
 bool
 mjit::Compiler::jsop_setelem(bool popGuaranteed)
 {
@@ -1531,7 +1540,7 @@ mjit::Compiler::jsop_setelem(bool popGuaranteed)
 #endif
     }
-    if (id->isType(JSVAL_TYPE_DOUBLE)) {
+    if (id->isType(JSVAL_TYPE_DOUBLE) || !globalObj) {
         jsop_setelem_slow();
         return true;
     }
@@ -1627,17 +1636,20 @@ mjit::Compiler::jsop_setelem(bool popGuaranteed)
         ic.slowPathStart = stubcc.syncExit(Uses(3));
         // Guard obj is a dense array.
-        ic.claspGuard = masm.testObjClass(Assembler::NotEqual, ic.objReg, &ArrayClass);
-        stubcc.linkExitDirect(ic.claspGuard, ic.slowPathStart);
+        Shape *shape = GetDenseArrayShape(cx, globalObj);
+        if (!shape)
+            return false;
+        ic.shapeGuard = masm.guardShape(ic.objReg, shape);
+        stubcc.linkExitDirect(ic.shapeGuard, ic.slowPathStart);
+
+        // Load the dynamic elements vector.
+        masm.loadPtr(Address(ic.objReg, JSObject::offsetOfElements()), ic.objReg);
         // Guard in range of initialized length.
-        Jump initlenGuard = masm.guardArrayExtent(JSObject::offsetOfInitializedLength(),
+        Jump initlenGuard = masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(),
                                                   ic.objReg, ic.key, Assembler::BelowOrEqual);
         stubcc.linkExitDirect(initlenGuard, ic.slowPathStart);
-        // Load the dynamic slots vector.
-        masm.loadPtr(Address(ic.objReg, offsetof(JSObject, slots)), ic.objReg);
-
         // Guard there's no hole, then store directly to the slot.
         if (ic.key.isConstant()) {
             Address slot(ic.objReg, ic.key.index() * sizeof(Value));
@@ -1763,7 +1775,7 @@ mjit::Compiler::jsop_getelem_dense(bool isPacked)
         loop->hoistArrayLengthCheck(DENSE_ARRAY, objv, indexv);
     // Get a register with either the object or its slots, depending on whether
-    // we are hoisting the bounds check.
+    // we are hoisting the slots computation.
     RegisterID baseReg;
     if (hoisted) {
         FrameEntry *slotsFe = loop->invariantArraySlots(objv);
@@ -1786,13 +1798,6 @@ mjit::Compiler::jsop_getelem_dense(bool isPacked)
     if (type == JSVAL_TYPE_UNKNOWN || type == JSVAL_TYPE_DOUBLE || hasTypeBarriers(PC))
         typeReg = frame.allocReg();
-    // Guard on the array's initialized length.
-    MaybeJump initlenGuard;
-    if (!hoisted) {
-        initlenGuard = masm.guardArrayExtent(JSObject::offsetOfInitializedLength(),
-                                             baseReg, key, Assembler::BelowOrEqual);
-    }
-
     frame.unpinReg(baseReg);
     if (pinKey)
         frame.unpinReg(key.reg());
@@ -1801,10 +1806,17 @@
     if (hoisted) {
         slotsReg = baseReg;
     } else {
+        masm.loadPtr(Address(baseReg, JSObject::offsetOfElements()), dataReg);
+        slotsReg = dataReg;
+    }
+
+    // Guard on the array's initialized length.
+    MaybeJump initlenGuard;
+    if (!hoisted) {
+        initlenGuard = masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(),
+                                             slotsReg, key, Assembler::BelowOrEqual);
         if (!allowUndefined)
             stubcc.linkExit(initlenGuard.get(), Uses(2));
-        masm.loadPtr(Address(baseReg, offsetof(JSObject, slots)), dataReg);
-        slotsReg = dataReg;
     }
     // Get the slot, skipping the hole check if the array is known to be packed.
@@ -2124,7 +2136,7 @@ mjit::Compiler::jsop_getelem(bool isCall)
     frame.forgetMismatchedObject(obj);
-    if (id->isType(JSVAL_TYPE_DOUBLE)) {
+    if (id->isType(JSVAL_TYPE_DOUBLE) || !globalObj) {
         if (isCall)
             jsop_callelem_slow();
         else
@@ -2200,9 +2212,12 @@
             stubcc.linkExitDirect(ic.typeGuard.get(), ic.slowPathStart);
         }
-        // Guard on the clasp.
-        ic.claspGuard = masm.testObjClass(Assembler::NotEqual, ic.objReg, &ArrayClass);
-        stubcc.linkExitDirect(ic.claspGuard, ic.slowPathStart);
+        // Guard obj is a dense array.
+        Shape *shape = GetDenseArrayShape(cx, globalObj);
+        if (!shape)
+            return false;
+        ic.shapeGuard = masm.guardShape(ic.objReg, shape);
+        stubcc.linkExitDirect(ic.shapeGuard, ic.slowPathStart);
         Int32Key key = id->isConstant()
                        ? Int32Key::FromConstant(id->getValue().toInt32())
                        : Int32Key::FromRegister(frame.tempRegForData(id));
@@ -2224,8 +2239,8 @@
     } else {
         // The type is known to not be dense-friendly ahead of time, so always
         // fall back to a slow path.
-        ic.claspGuard = masm.jump();
-        stubcc.linkExitDirect(ic.claspGuard, ic.slowPathStart);
+        ic.shapeGuard = masm.jump();
+        stubcc.linkExitDirect(ic.shapeGuard, ic.slowPathStart);
     stubcc.leave();
@@ -2687,7 +2702,7 @@ mjit::Compiler::jsop_initprop()
         /* Perform the store. */
         Shape *shape = (Shape *) prop;
-        Address address = masm.objPropAddress(baseobj, objReg, shape->slot);
+        Address address = masm.objPropAddress(baseobj, objReg, shape->slot());
         frame.storeTo(fe, address);
         frame.freeReg(objReg);
     }
@@ -2718,14 +2733,12 @@ mjit::Compiler::jsop_initelem()
     int32 idx = id->getValue().toInt32();
     RegisterID objReg = frame.copyDataIntoReg(obj);
+    masm.loadPtr(Address(objReg, JSObject::offsetOfElements()), objReg);
-    if (cx->typeInferenceEnabled()) {
-        /* Update the initialized length. */
-        masm.store32(Imm32(idx + 1), Address(objReg, JSObject::offsetOfInitializedLength()));
-    }
+    /* Update the initialized length. */
+    masm.store32(Imm32(idx + 1), Address(objReg, ObjectElements::offsetOfInitializedLength()));
     /* Perform the store.
*/ - masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg); frame.storeTo(fe, Address(objReg, idx * sizeof(Value))); frame.freeReg(objReg); } diff --git a/js/src/methodjit/FrameState-inl.h b/js/src/methodjit/FrameState-inl.h index a6eb90a61a6a..aa061226245d 100644 --- a/js/src/methodjit/FrameState-inl.h +++ b/js/src/methodjit/FrameState-inl.h @@ -1078,7 +1078,7 @@ FrameState::frameOffset(const FrameEntry *fe, ActiveFrame *a) const if (fe >= a->args) return StackFrame::offsetOfFormalArg(a->script->function(), uint32(fe - a->args)); if (fe == a->this_) - return StackFrame::offsetOfThis(a->script->hasFunction ? a->script->function() : NULL); + return StackFrame::offsetOfThis(a->script->function()); if (fe == a->callee_) return StackFrame::offsetOfCallee(a->script->function()); JS_NOT_REACHED("Bad fe"); @@ -1269,7 +1269,7 @@ inline FrameEntry * FrameState::getCallee() { // Callee can only be used in function code, and it's always an object. - JS_ASSERT(a->script->hasFunction); + JS_ASSERT(a->script->function()); FrameEntry *fe = a->callee_; if (!fe->isTracked()) { addToTracker(fe); diff --git a/js/src/methodjit/FrameState.h b/js/src/methodjit/FrameState.h index 0ff85222d852..47bbe7324e51 100644 --- a/js/src/methodjit/FrameState.h +++ b/js/src/methodjit/FrameState.h @@ -1052,7 +1052,7 @@ class FrameState inline bool isConstructorThis(const FrameEntry *fe) const; bool isArg(const FrameEntry *fe) const { - return a->script->hasFunction && fe >= a->args && fe - a->args < a->script->function()->nargs; + return a->script->function() && fe >= a->args && fe - a->args < a->script->function()->nargs; } bool isLocal(const FrameEntry *fe) const { diff --git a/js/src/methodjit/ICLabels.h b/js/src/methodjit/ICLabels.h index 50b60643c0f1..7923a248be88 100644 --- a/js/src/methodjit/ICLabels.h +++ b/js/src/methodjit/ICLabels.h @@ -54,29 +54,14 @@ namespace js { namespace mjit { namespace ic { -/* - * On x64 and ARM, we record offsets into the labels data structures at runtime - * instead of using hardcoded offsets into the instruction stream, as we do on - * x86. - * - * This is done on x64 because of variable-width instruction encoding when - * using the extended register set. It is done on ARM for ease of - * implementation. - */ - -#if defined JS_CPU_X64 || defined JS_CPU_ARM || defined JS_CPU_SPARC -# define JS_HAS_IC_LABELS -#endif - /* GetPropCompiler */ struct GetPropLabels : MacroAssemblerTypedefs { friend class ::ICOffsetInitializer; void setValueLoad(MacroAssembler &masm, Label fastPathRejoin, Label fastValueLoad) { int offset = masm.differenceBetween(fastPathRejoin, fastValueLoad); -#ifdef JS_HAS_IC_LABELS inlineValueLoadOffset = offset; -#endif + /* * Note: the offset between the type and data loads for x86 is asserted * in NunboxAssembler::loadValueWithAddressOffsetPatch. 
@@ -98,17 +83,17 @@ struct GetPropLabels : MacroAssemblerTypedefs { return fastPathRejoin.instructionAtOffset(getDslotsLoadOffset()); } - void setInlineShapeData(MacroAssembler &masm, Label shapeGuard, DataLabel32 inlineShape) { + void setInlineShapeData(MacroAssembler &masm, Label shapeGuard, DataLabelPtr inlineShape) { int offset = masm.differenceBetween(shapeGuard, inlineShape); setInlineShapeOffset(offset); } - CodeLocationDataLabel32 getInlineShapeData(CodeLocationLabel fastShapeGuard) { - return fastShapeGuard.dataLabel32AtOffset(getInlineShapeOffset()); + CodeLocationDataLabelPtr getInlineShapeData(CodeLocationLabel fastShapeGuard) { + return fastShapeGuard.dataLabelPtrAtOffset(getInlineShapeOffset()); } /* - * Note: on x64, the base is the inlineShapeLabel DataLabel32, whereas on other + * Note: on x64, the base is the inlineShapeLabel DataLabelPtr, whereas on other * platforms the base is the shapeGuard. */ template @@ -137,53 +122,36 @@ struct GetPropLabels : MacroAssemblerTypedefs { /* Offset-based interface */ void setDslotsLoadOffset(int offset) { -#ifdef JS_HAS_IC_LABELS dslotsLoadOffset = offset; -#endif JS_ASSERT(offset == dslotsLoadOffset); } void setInlineShapeOffset(int offset) { -#ifdef JS_HAS_IC_LABELS inlineShapeOffset = offset; -#endif JS_ASSERT(offset == inlineShapeOffset); } void setStubShapeJumpOffset(int offset) { -#ifdef JS_HAS_IC_LABELS stubShapeJumpOffset = offset; -#endif JS_ASSERT(offset == stubShapeJumpOffset); } int getInlineShapeJumpOffset() { -#if defined JS_CPU_X64 - return getInlineShapeOffset() + INLINE_SHAPE_JUMP; -#else - return POST_INST_OFFSET(INLINE_SHAPE_JUMP); -#endif + return POST_INST_OFFSET(inlineShapeJumpOffset); } void setInlineShapeJumpOffset(int offset) { - JS_ASSERT(INLINE_SHAPE_JUMP == offset); + inlineShapeJumpOffset = offset; + JS_ASSERT(offset == inlineShapeJumpOffset); } int getInlineTypeJumpOffset() { -#if defined JS_CPU_X86 || defined JS_CPU_X64 - return INLINE_TYPE_JUMP; -#elif defined JS_CPU_ARM || defined JS_CPU_SPARC return POST_INST_OFFSET(inlineTypeJumpOffset); -#endif } void setInlineTypeJumpOffset(int offset) { -#if defined JS_CPU_X86 || defined JS_CPU_X64 - JS_ASSERT(INLINE_TYPE_JUMP == offset); -#elif defined JS_CPU_ARM || defined JS_CPU_SPARC inlineTypeJumpOffset = offset; JS_ASSERT(offset == inlineTypeJumpOffset); -#endif } int getInlineShapeOffset() { @@ -213,55 +181,38 @@ struct GetPropLabels : MacroAssemblerTypedefs { */ int32 stubShapeJumpOffset : 8; -#if defined JS_CPU_X86 - static const int32 INLINE_SHAPE_JUMP = 12; - static const int32 INLINE_TYPE_JUMP = 9; -#elif defined JS_CPU_X64 - static const int32 INLINE_SHAPE_JUMP = 6; - static const int32 INLINE_TYPE_JUMP = 19; -#elif defined JS_CPU_ARM /* Offset from the shape guard start to the shape guard jump. */ - static const int32 INLINE_SHAPE_JUMP = 12; + int32 inlineShapeJumpOffset : 8; /* Offset from the fast path to the type guard jump. */ int32 inlineTypeJumpOffset : 8; -#elif defined JS_CPU_SPARC - static const int32 INLINE_SHAPE_JUMP = 48; - static const int32 INLINE_TYPE_JUMP = 48; - /* Offset from the fast path to the type guard jump. 
*/ - int32 inlineTypeJumpOffset : 8; -#endif }; /* SetPropCompiler */ struct SetPropLabels : MacroAssemblerTypedefs { friend class ::ICOffsetInitializer; - void setInlineValueStore(MacroAssembler &masm, Label fastPathRejoin, DataLabel32 inlineValueStore, - const ValueRemat &vr) { + void setInlineValueStore(MacroAssembler &masm, Label fastPathRejoin, DataLabel32 inlineValueStore) { int offset = masm.differenceBetween(fastPathRejoin, inlineValueStore); - setInlineValueStoreOffset(offset, vr.isConstant(), vr.isTypeKnown()); + setInlineValueStoreOffset(offset); } - CodeLocationLabel getInlineValueStore(CodeLocationLabel fastPathRejoin, const ValueRemat &vr) { - return fastPathRejoin.labelAtOffset(getInlineValueStoreOffset(vr.isConstant(), - vr.isTypeKnown())); + CodeLocationLabel getInlineValueStore(CodeLocationLabel fastPathRejoin) { + return fastPathRejoin.labelAtOffset(getInlineValueStoreOffset()); } - void setInlineShapeData(MacroAssembler &masm, Label shapeGuard, DataLabel32 inlineShapeData) { + void setInlineShapeData(MacroAssembler &masm, Label shapeGuard, DataLabelPtr inlineShapeData) { int offset = masm.differenceBetween(shapeGuard, inlineShapeData); setInlineShapeDataOffset(offset); } - CodeLocationDataLabel32 getInlineShapeData(CodeLocationLabel fastPathStart, int shapeGuardOffset) { - return fastPathStart.dataLabel32AtOffset(shapeGuardOffset + getInlineShapeDataOffset()); + CodeLocationDataLabelPtr getInlineShapeData(CodeLocationLabel fastPathStart, int shapeGuardOffset) { + return fastPathStart.dataLabelPtrAtOffset(shapeGuardOffset + getInlineShapeDataOffset()); } - void setDslotsLoad(MacroAssembler &masm, Label fastPathRejoin, Label beforeLoad, - const ValueRemat &rhs) { - JS_ASSERT(!rhs.isFPRegister()); + void setDslotsLoad(MacroAssembler &masm, Label fastPathRejoin, Label beforeLoad) { int offset = masm.differenceBetween(fastPathRejoin, beforeLoad); - setDslotsLoadOffset(offset, rhs.isConstant(), rhs.isTypeKnown()); + setDslotsLoadOffset(offset); } CodeLocationInstruction getDslotsLoad(CodeLocationLabel fastPathRejoin, const ValueRemat &vr) { @@ -289,61 +240,33 @@ struct SetPropLabels : MacroAssemblerTypedefs { /* Offset-based interface. 
*/ - void setDslotsLoadOffset(int offset, bool isConstant, bool isTypeKnown) { -#if defined JS_HAS_IC_LABELS + void setDslotsLoadOffset(int offset) { dslotsLoadOffset = offset; JS_ASSERT(offset == dslotsLoadOffset); -#elif defined JS_CPU_X86 - JS_ASSERT_IF(isConstant, offset == INLINE_DSLOTS_BEFORE_CONSTANT); - JS_ASSERT_IF(isTypeKnown && !isConstant, offset == INLINE_DSLOTS_BEFORE_KTYPE); - JS_ASSERT_IF(!isTypeKnown, offset == INLINE_DSLOTS_BEFORE_DYNAMIC); -#else -# error -#endif } int getDslotsLoadOffset(const ValueRemat &vr) { -#if defined JS_CPU_X86 - if (vr.isConstant()) - return INLINE_DSLOTS_BEFORE_CONSTANT; - if (vr.isTypeKnown()) - return INLINE_DSLOTS_BEFORE_KTYPE; - return INLINE_DSLOTS_BEFORE_DYNAMIC; -#else (void) vr; return dslotsLoadOffset; -#endif } void setInlineShapeDataOffset(int offset) { -#ifdef JS_HAS_IC_LABELS inlineShapeDataOffset = offset; -#endif JS_ASSERT(offset == inlineShapeDataOffset); } void setStubShapeJumpOffset(int offset) { -#ifdef JS_HAS_IC_LABELS stubShapeJumpOffset = offset; -#endif JS_ASSERT(offset == stubShapeJumpOffset); } - void setInlineValueStoreOffset(int offset, bool isConstant, bool isTypeKnown) { -#ifdef JS_HAS_IC_LABELS + void setInlineValueStoreOffset(int offset) { inlineValueStoreOffset = offset; JS_ASSERT(offset == inlineValueStoreOffset); -#elif defined JS_CPU_X86 - JS_ASSERT_IF(isConstant, offset == INLINE_VALUE_STORE_CONSTANT); - JS_ASSERT_IF(isTypeKnown && !isConstant, offset == INLINE_VALUE_STORE_KTYPE); - JS_ASSERT_IF(!isTypeKnown && !isConstant, offset == INLINE_VALUE_STORE_DYNAMIC); -#endif } void setInlineShapeJumpOffset(int offset) { -#ifdef JS_HAS_IC_LABELS inlineShapeJumpOffset = offset; -#endif JS_ASSERT(offset == inlineShapeJumpOffset); } @@ -359,27 +282,12 @@ struct SetPropLabels : MacroAssemblerTypedefs { return POST_INST_OFFSET(stubShapeJumpOffset); } - int getInlineValueStoreOffset(bool isConstant, bool isTypeKnown) { -#ifdef JS_HAS_IC_LABELS + int getInlineValueStoreOffset() { return inlineValueStoreOffset; -#elif defined JS_CPU_X86 - if (isConstant) - return INLINE_VALUE_STORE_CONSTANT; - else if (isTypeKnown) - return INLINE_VALUE_STORE_KTYPE; - else - return INLINE_VALUE_STORE_DYNAMIC; -#endif } /* Offset from storeBack to beginning of 'mov dslots, addr'. */ -#if defined JS_CPU_X86 - static const int INLINE_DSLOTS_BEFORE_CONSTANT = -23; - static const int INLINE_DSLOTS_BEFORE_KTYPE = -19; - static const int INLINE_DSLOTS_BEFORE_DYNAMIC = -15; -#else int32 dslotsLoadOffset : 8; -#endif /* Offset from shapeGuard to end of shape comparison. */ int32 inlineShapeDataOffset : 8; @@ -391,13 +299,7 @@ struct SetPropLabels : MacroAssemblerTypedefs { */ int32 stubShapeJumpOffset : 8; -#if defined JS_CPU_X86 - static const int INLINE_VALUE_STORE_CONSTANT = -20; - static const int INLINE_VALUE_STORE_KTYPE = -16; - static const int INLINE_VALUE_STORE_DYNAMIC = -12; -#else int32 inlineValueStoreOffset : 8; -#endif /* Offset from shapeGuard to the end of the shape jump. 
*/ int32 inlineShapeJumpOffset : 8; @@ -408,9 +310,7 @@ struct BindNameLabels : MacroAssemblerTypedefs { friend class ::ICOffsetInitializer; void setInlineJumpOffset(int offset) { -#ifdef JS_HAS_IC_LABELS inlineJumpOffset = offset; -#endif JS_ASSERT(offset == inlineJumpOffset); } @@ -428,9 +328,7 @@ struct BindNameLabels : MacroAssemblerTypedefs { } void setStubJumpOffset(int offset) { -#ifdef JS_HAS_IC_LABELS stubJumpOffset = offset; -#endif JS_ASSERT(offset == stubJumpOffset); } @@ -460,9 +358,7 @@ struct ScopeNameLabels : MacroAssemblerTypedefs { friend class ::ICOffsetInitializer; void setInlineJumpOffset(int offset) { -#ifdef JS_HAS_IC_LABELS inlineJumpOffset = offset; -#endif JS_ASSERT(offset == inlineJumpOffset); } @@ -480,9 +376,7 @@ struct ScopeNameLabels : MacroAssemblerTypedefs { } void setStubJumpOffset(int offset) { -#ifdef JS_HAS_IC_LABELS stubJumpOffset = offset; -#endif JS_ASSERT(offset == stubJumpOffset); } diff --git a/js/src/methodjit/InlineFrameAssembler.h b/js/src/methodjit/InlineFrameAssembler.h index e8bce636f0f1..4075994488bf 100644 --- a/js/src/methodjit/InlineFrameAssembler.h +++ b/js/src/methodjit/InlineFrameAssembler.h @@ -90,7 +90,6 @@ class InlineFrameAssembler { { frameSize = ic.frameSize; funObjReg = ic.funObjReg; - tempRegs.takeReg(ic.funPtrReg); tempRegs.takeReg(funObjReg); } diff --git a/js/src/methodjit/InvokeHelpers.cpp b/js/src/methodjit/InvokeHelpers.cpp index 7c7906d6cfea..3a35b47bdf2a 100644 --- a/js/src/methodjit/InvokeHelpers.cpp +++ b/js/src/methodjit/InvokeHelpers.cpp @@ -310,8 +310,7 @@ UncachedInlineCall(VMFrame &f, InitialFrameFlags initial, { JSContext *cx = f.cx; CallArgs args = CallArgsFromSp(argc, f.regs.sp); - JSObject &callee = args.callee(); - JSFunction *newfun = callee.getFunctionPrivate(); + JSFunction *newfun = args.callee().toFunction(); JSScript *newscript = newfun->script(); bool construct = InitialFrameFlagsAreConstructing(initial); @@ -352,7 +351,7 @@ UncachedInlineCall(VMFrame &f, InitialFrameFlags initial, FrameRegs regs = f.regs; /* Get pointer to new frame/slots, prepare arguments. */ - if (!cx->stack.pushInlineFrame(cx, regs, args, callee, newfun, newscript, initial, &f.stackLimit)) + if (!cx->stack.pushInlineFrame(cx, regs, args, *newfun, newscript, initial, &f.stackLimit)) return false; /* Finish the handoff to the new frame regs. */ @@ -415,7 +414,6 @@ stubs::UncachedNewHelper(VMFrame &f, uint32 argc, UncachedCallResult *ucr) /* Try to do a fast inline call before the general Invoke path. */ if (IsFunctionObject(args.calleev(), &ucr->fun) && ucr->fun->isInterpretedConstructor()) { - ucr->callee = &args.callee(); if (!UncachedInlineCall(f, INITIAL_CONSTRUCT, &ucr->codeAddr, &ucr->unjittable, argc)) THROW(); } else { @@ -469,10 +467,7 @@ stubs::UncachedCallHelper(VMFrame &f, uint32 argc, bool lowered, UncachedCallRes JSContext *cx = f.cx; CallArgs args = CallArgsFromSp(argc, f.regs.sp); - if (IsFunctionObject(args.calleev(), &ucr->callee)) { - ucr->callee = &args.callee(); - ucr->fun = ucr->callee->getFunctionPrivate(); - + if (IsFunctionObject(args.calleev(), &ucr->fun)) { if (ucr->fun->isInterpreted()) { InitialFrameFlags initial = lowered ? 
INITIAL_LOWERED : INITIAL_NONE; if (!UncachedInlineCall(f, initial, &ucr->codeAddr, &ucr->unjittable, argc)) @@ -610,7 +605,7 @@ js_InternalThrow(VMFrame &f) */ cx->compartment->jaegerCompartment()->setLastUnfinished(Jaeger_Unfinished); - if (!script->ensureRanAnalysis(cx)) { + if (!script->ensureRanAnalysis(cx, NULL)) { js_ReportOutOfMemory(cx); return NULL; } @@ -743,7 +738,7 @@ js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VM JSOp op = JSOp(*pc); const JSCodeSpec *cs = &js_CodeSpec[op]; - if (!script->ensureRanAnalysis(cx)) { + if (!script->ensureRanAnalysis(cx, NULL)) { js_ReportOutOfMemory(cx); return js_InternalThrow(f); } diff --git a/js/src/methodjit/Logging.cpp b/js/src/methodjit/Logging.cpp index f2b4e6dffb5a..2b9e32843f47 100644 --- a/js/src/methodjit/Logging.cpp +++ b/js/src/methodjit/Logging.cpp @@ -46,6 +46,8 @@ #include "MethodJIT.h" #include "Logging.h" +#include "jsobjinlines.h" + #if defined(JS_METHODJIT_SPEW) static bool LoggingChecked = false; diff --git a/js/src/methodjit/LoopState.cpp b/js/src/methodjit/LoopState.cpp index aadca78ba935..901ff20e9a51 100644 --- a/js/src/methodjit/LoopState.cpp +++ b/js/src/methodjit/LoopState.cpp @@ -168,7 +168,7 @@ LoopState::init(jsbytecode *head, Jump entry, jsbytecode *entryTarget) * Don't hoist bounds checks or loop invariant code in scripts that have * had indirect modification of their arguments. */ - if (outerScript->hasFunction) { + if (outerScript->function()) { if (TypeSet::HasObjectFlags(cx, outerScript->function()->getType(cx), OBJECT_FLAG_UNINLINEABLE)) this->skipAnalysis = true; } @@ -316,7 +316,7 @@ LoopState::entryRedundant(const InvariantEntry &e0, const InvariantEntry &e1) int32 c1 = e1.u.check.constant; /* - * initialized lengths are always <= JSObject::NSLOTS_LIMIT, check for + * initialized lengths are always <= JSObject::NELEMENTS_LIMIT, check for * integer overflow checks redundant given initialized length checks. * If Y <= c0 and Y + c1 < initlen(array): * @@ -331,7 +331,7 @@ LoopState::entryRedundant(const InvariantEntry &e0, const InvariantEntry &e1) constant = c0; else if (!SafeAdd(c0, c1, &constant)) return false; - return constant >= JSObject::NSLOTS_LIMIT; + return constant >= (int32) JSObject::NELEMENTS_LIMIT; } /* Look for matching tests that differ only in their constants. */ @@ -1324,10 +1324,12 @@ LoopState::restoreInvariants(jsbytecode *pc, Assembler &masm, * in the invariant list, so don't recheck this is an object. */ masm.loadPayload(frame.addressOf(entry.u.check.arraySlot), T0); - if (entry.kind == InvariantEntry::DENSE_ARRAY_BOUNDS_CHECK) - masm.load32(Address(T0, JSObject::offsetOfInitializedLength()), T0); - else + if (entry.kind == InvariantEntry::DENSE_ARRAY_BOUNDS_CHECK) { + masm.loadPtr(Address(T0, JSObject::offsetOfElements()), T0); + masm.load32(Address(T0, ObjectElements::offsetOfInitializedLength()), T0); + } else { masm.loadPayload(Address(T0, TypedArray::lengthOffset()), T0); + } int32 constant = entry.u.check.constant; @@ -1393,18 +1395,16 @@ LoopState::restoreInvariants(jsbytecode *pc, Assembler &masm, Jump notObject = masm.testObject(Assembler::NotEqual, frame.addressOf(array)); jumps->append(notObject); masm.loadPayload(frame.addressOf(array), T0); - - uint32 offset = (entry.kind == InvariantEntry::DENSE_ARRAY_SLOTS) - ? 
JSObject::offsetOfSlots() - : offsetof(JSObject, privateData); + masm.loadPtr(Address(T0, JSObject::offsetOfElements()), T0); Address address = frame.addressOf(frame.getTemporary(entry.u.array.temporary)); - masm.loadPtr(Address(T0, offset), T0); - if (entry.kind == InvariantEntry::DENSE_ARRAY_LENGTH) + if (entry.kind == InvariantEntry::DENSE_ARRAY_LENGTH) { + masm.load32(Address(T0, ObjectElements::offsetOfLength()), T0); masm.storeValueFromComponents(ImmType(JSVAL_TYPE_INT32), T0, address); - else + } else { masm.storePayload(T0, address); + } break; } diff --git a/js/src/methodjit/MonoIC.cpp b/js/src/methodjit/MonoIC.cpp index cb188289eb11..9fe2326872b7 100644 --- a/js/src/methodjit/MonoIC.cpp +++ b/js/src/methodjit/MonoIC.cpp @@ -71,6 +71,7 @@ typedef JSC::MacroAssembler::ImmPtr ImmPtr; typedef JSC::MacroAssembler::Call Call; typedef JSC::MacroAssembler::Label Label; typedef JSC::MacroAssembler::DataLabel32 DataLabel32; +typedef JSC::MacroAssembler::DataLabelPtr DataLabelPtr; #if defined JS_MONOIC @@ -99,11 +100,11 @@ ic::GetGlobalName(VMFrame &f, ic::GetGlobalNameIC *ic) stubs::GetGlobalName(f); return; } - uint32 slot = shape->slot; + uint32 slot = shape->slot(); /* Patch shape guard. */ Repatcher repatcher(f.jit()); - repatcher.repatch(ic->fastPathStart.dataLabel32AtOffset(ic->shapeOffset), obj->shape()); + repatcher.repatch(ic->fastPathStart.dataLabelPtrAtOffset(ic->shapeOffset), obj->lastProperty()); /* Patch loads. */ uint32 index = obj->dynamicSlotIndex(slot); @@ -151,118 +152,21 @@ PatchSetFallback(VMFrame &f, ic::SetGlobalNameIC *ic) } void -SetGlobalNameIC::patchExtraShapeGuard(Repatcher &repatcher, int32 shape) +SetGlobalNameIC::patchExtraShapeGuard(Repatcher &repatcher, const Shape *shape) { JS_ASSERT(hasExtraStub); JSC::CodeLocationLabel label(JSC::MacroAssemblerCodePtr(extraStub.start())); - repatcher.repatch(label.dataLabel32AtOffset(extraShapeGuard), shape); + repatcher.repatch(label.dataLabelPtrAtOffset(extraShapeGuard), shape); } void -SetGlobalNameIC::patchInlineShapeGuard(Repatcher &repatcher, int32 shape) +SetGlobalNameIC::patchInlineShapeGuard(Repatcher &repatcher, const Shape *shape) { - JSC::CodeLocationDataLabel32 label = fastPathStart.dataLabel32AtOffset(shapeOffset); + JSC::CodeLocationDataLabelPtr label = fastPathStart.dataLabelPtrAtOffset(shapeOffset); repatcher.repatch(label, shape); } -static LookupStatus -UpdateSetGlobalNameStub(VMFrame &f, ic::SetGlobalNameIC *ic, JSObject *obj, const Shape *shape) -{ - Repatcher repatcher(ic->extraStub); - - ic->patchExtraShapeGuard(repatcher, obj->shape()); - - uint32 index = obj->dynamicSlotIndex(shape->slot); - JSC::CodeLocationLabel label(JSC::MacroAssemblerCodePtr(ic->extraStub.start())); - label = label.labelAtOffset(ic->extraStoreOffset); - repatcher.patchAddressOffsetForValueStore(label, index * sizeof(Value), - ic->vr.isTypeKnown()); - - return Lookup_Cacheable; -} - -static LookupStatus -AttachSetGlobalNameStub(VMFrame &f, ic::SetGlobalNameIC *ic, JSObject *obj, const Shape *shape) -{ - Assembler masm; - - Label start = masm.label(); - - DataLabel32 shapeLabel; - Jump guard = masm.branch32WithPatch(Assembler::NotEqual, ic->shapeReg, Imm32(obj->shape()), - shapeLabel); - - /* A constant object needs rematerialization. */ - if (ic->objConst) - masm.move(ImmPtr(obj), ic->objReg); - - JS_ASSERT(obj->branded()); - - /* - * Load obj->slots. If ic->objConst, then this clobbers objReg, because - * ic->objReg == ic->shapeReg. 
- */ - JS_ASSERT(!obj->isFixedSlot(shape->slot)); - masm.loadPtr(Address(ic->objReg, JSObject::offsetOfSlots()), ic->shapeReg); - - /* Test if overwriting a function-tagged slot. */ - Address slot(ic->shapeReg, sizeof(Value) * obj->dynamicSlotIndex(shape->slot)); - Jump isNotObject = masm.testObject(Assembler::NotEqual, slot); - - /* Now, test if the object is a function object. */ - masm.loadPayload(slot, ic->shapeReg); - Jump isFun = masm.testFunction(Assembler::Equal, ic->shapeReg); - - /* Restore shapeReg to obj->slots, since we clobbered it. */ - if (ic->objConst) - masm.move(ImmPtr(obj), ic->objReg); - masm.loadPtr(Address(ic->objReg, JSObject::offsetOfSlots()), ic->shapeReg); - - /* If the object test fails, shapeReg is still obj->slots. */ - isNotObject.linkTo(masm.label(), &masm); - DataLabel32 store = masm.storeValueWithAddressOffsetPatch(ic->vr, slot); - - Jump done = masm.jump(); - - JITScript *jit = f.jit(); - LinkerHelper linker(masm, JSC::METHOD_CODE); - JSC::ExecutablePool *ep = linker.init(f.cx); - if (!ep) - return Lookup_Error; - if (!jit->execPools.append(ep)) { - ep->release(); - js_ReportOutOfMemory(f.cx); - return Lookup_Error; - } - - if (!linker.verifyRange(jit)) - return Lookup_Uncacheable; - - linker.link(done, ic->fastPathStart.labelAtOffset(ic->fastRejoinOffset)); - linker.link(guard, ic->slowPathStart); - linker.link(isFun, ic->slowPathStart); - - JSC::CodeLocationLabel cs = linker.finalize(f); - JaegerSpew(JSpew_PICs, "generated setgname stub at %p\n", cs.executableAddress()); - - Repatcher repatcher(f.jit()); - repatcher.relink(ic->fastPathStart.jumpAtOffset(ic->inlineShapeJump), cs); - - int offset = linker.locationOf(shapeLabel) - linker.locationOf(start); - ic->extraShapeGuard = offset; - JS_ASSERT(ic->extraShapeGuard == offset); - - ic->extraStub = JSC::JITCode(cs.executableAddress(), linker.size()); - offset = linker.locationOf(store) - linker.locationOf(start); - ic->extraStoreOffset = offset; - JS_ASSERT(ic->extraStoreOffset == offset); - - ic->hasExtraStub = true; - - return Lookup_Cacheable; -} - static LookupStatus UpdateSetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic, JSObject *obj, const Shape *shape) { @@ -281,36 +185,11 @@ UpdateSetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic, JSObject *obj, const Sh return Lookup_Uncacheable; } - /* Branded sets must guard that they don't overwrite method-valued properties. */ - if (obj->branded()) { - /* - * If this slot has a function valued property, the tail of this opcode - * could change the shape. Even if it doesn't, the IC is probably - * pointless, because it will always hit the function-test path and - * bail out. In these cases, don't bother building or updating the IC. - */ - const Value &v = obj->getSlot(shape->slot); - if (v.isObject() && v.toObject().isFunction()) { - /* - * If we're going to rebrand, the object may unbrand, allowing this - * IC to come back to life. In that case, we don't disable the IC. - */ - if (!ChangesMethodValue(v, f.regs.sp[-1])) - PatchSetFallback(f, ic); - return Lookup_Uncacheable; - } - - if (ic->hasExtraStub) - return UpdateSetGlobalNameStub(f, ic, obj, shape); - - return AttachSetGlobalNameStub(f, ic, obj, shape); - } - /* Object is not branded, so we can use the inline path. 
*/ Repatcher repatcher(f.jit()); - ic->patchInlineShapeGuard(repatcher, obj->shape()); + ic->patchInlineShapeGuard(repatcher, obj->lastProperty()); - uint32 index = obj->dynamicSlotIndex(shape->slot); + uint32 index = obj->dynamicSlotIndex(shape->slot()); JSC::CodeLocationLabel label = ic->fastPathStart.labelAtOffset(ic->loadStoreOffset); repatcher.patchAddressOffsetForValueStore(label, index * sizeof(Value), ic->vr.isTypeKnown()); @@ -457,10 +336,10 @@ class EqualityCompiler : public BaseCompiler linkToStub(rhsFail); } - Jump lhsHasEq = masm.branchTest32(Assembler::NonZero, - Address(lvr.dataReg(), - offsetof(JSObject, flags)), - Imm32(JSObject::HAS_EQUALITY)); + masm.loadObjClass(lvr.dataReg(), ic.tempReg); + Jump lhsHasEq = masm.branchPtr(Assembler::NotEqual, + Address(ic.tempReg, offsetof(Class, ext.equality)), + ImmPtr(NULL)); linkToStub(lhsHasEq); if (rvr.isConstant()) { @@ -734,9 +613,8 @@ class CallCompiler : public BaseCompiler void *ncode = ic.funGuard.labelAtOffset(ic.joinPointOffset).executableAddress(); inlFrame.assemble(ncode, f.pc()); - /* funPtrReg is still valid. Check if a compilation is needed. */ - Address scriptAddr(ic.funPtrReg, offsetof(JSFunction, u) + - offsetof(JSFunction::U::Scripted, script_)); + /* funObjReg is still valid. Check if a compilation is needed. */ + Address scriptAddr(ic.funObjReg, JSFunction::offsetOfNativeOrScript()); masm.loadPtr(scriptAddr, t0); /* @@ -857,7 +735,7 @@ class CallCompiler : public BaseCompiler { JS_ASSERT(ic.frameSize.isStatic()); - /* Slightly less fast path - guard on fun->getFunctionPrivate() instead. */ + /* Slightly less fast path - guard on fun->script() instead. */ Assembler masm; Registers tempRegs(Registers::AvailRegs); @@ -866,12 +744,12 @@ class CallCompiler : public BaseCompiler RegisterID t0 = tempRegs.takeAnyReg().reg(); /* Guard that it's actually a function object. */ - Jump claspGuard = masm.testObjClass(Assembler::NotEqual, ic.funObjReg, &FunctionClass); + Jump claspGuard = masm.testObjClass(Assembler::NotEqual, ic.funObjReg, t0, &FunctionClass); - /* Guard that it's the same function. */ - JSFunction *fun = obj->getFunctionPrivate(); - masm.loadObjPrivate(ic.funObjReg, t0); - Jump funGuard = masm.branchPtr(Assembler::NotEqual, t0, ImmPtr(fun)); + /* Guard that it's the same script. */ + Address scriptAddr(ic.funObjReg, JSFunction::offsetOfNativeOrScript()); + Jump funGuard = masm.branchPtr(Assembler::NotEqual, scriptAddr, + ImmPtr(obj->toFunction()->script())); Jump done = masm.jump(); LinkerHelper linker(masm, JSC::METHOD_CODE); @@ -923,11 +801,10 @@ class CallCompiler : public BaseCompiler args = CallArgsFromSp(f.u.call.dynamicArgc, f.regs.sp); } - JSObject *obj; - if (!IsFunctionObject(args.calleev(), &obj)) + JSFunction *fun; + if (!IsFunctionObject(args.calleev(), &fun)) return false; - JSFunction *fun = obj->getFunctionPrivate(); if ((!callingNew && !fun->isNative()) || (callingNew && !fun->isConstructor())) return false; @@ -947,7 +824,7 @@ class CallCompiler : public BaseCompiler * inlining the parent frame in the first place, so mark the immediate * caller as uninlineable. */ - if (f.script()->hasFunction) { + if (f.script()->function()) { f.script()->uninlineable = true; MarkTypeObjectFlags(cx, f.script()->function(), types::OBJECT_FLAG_UNINLINEABLE); } @@ -972,7 +849,7 @@ class CallCompiler : public BaseCompiler Assembler masm; /* Guard on the function object identity, for now. 
*/ - Jump funGuard = masm.branchPtr(Assembler::NotEqual, ic.funObjReg, ImmPtr(obj)); + Jump funGuard = masm.branchPtr(Assembler::NotEqual, ic.funObjReg, ImmPtr(fun)); /* * Write the rejoin state for the recompiler to use if this call @@ -1069,7 +946,7 @@ class CallCompiler : public BaseCompiler linker.patchJump(ic.slowPathStart.labelAtOffset(ic.slowJoinOffset)); - ic.fastGuardedNative = obj; + ic.fastGuardedNative = fun; linker.link(funGuard, ic.slowPathStart); JSC::CodeLocationLabel start = linker.finalize(f); @@ -1116,8 +993,6 @@ class CallCompiler : public BaseCompiler JS_ASSERT(fun); JSScript *script = fun->script(); JS_ASSERT(script); - JSObject *callee = ucr.callee; - JS_ASSERT(callee); uint32 flags = callingNew ? StackFrame::CONSTRUCTING : 0; @@ -1130,17 +1005,17 @@ class CallCompiler : public BaseCompiler if (!generateFullCallStub(jit, script, flags)) THROWV(NULL); } else { - if (!ic.fastGuardedObject && patchInlinePath(jit, script, callee)) { + if (!ic.fastGuardedObject && patchInlinePath(jit, script, fun)) { // Nothing, done. } else if (ic.fastGuardedObject && !ic.hasJsFunCheck && !ic.fastGuardedNative && - ic.fastGuardedObject->getFunctionPrivate() == fun) { + ic.fastGuardedObject->toFunction()->script() == fun->script()) { /* * Note: Multiple "function guard" stubs are not yet * supported, thus the fastGuardedNative check. */ - if (!generateStubForClosures(jit, callee)) + if (!generateStubForClosures(jit, fun)) THROWV(NULL); } else { if (!generateFullCallStub(jit, script, flags)) @@ -1218,7 +1093,7 @@ ic::SplatApplyArgs(VMFrame &f) */ if (f.u.call.lazyArgsObj) { Value *vp = f.regs.sp - 3; - JS_ASSERT(JS_CALLEE(cx, vp).toObject().getFunctionPrivate()->u.n.native == js_fun_apply); + JS_ASSERT(JS_CALLEE(cx, vp).toObject().toFunction()->u.n.native == js_fun_apply); StackFrame *fp = f.regs.fp(); if (!fp->hasOverriddenArgs()) { @@ -1272,7 +1147,7 @@ ic::SplatApplyArgs(VMFrame &f) } Value *vp = f.regs.sp - 4; - JS_ASSERT(JS_CALLEE(cx, vp).toObject().getFunctionPrivate()->u.n.native == js_fun_apply); + JS_ASSERT(JS_CALLEE(cx, vp).toObject().toFunction()->u.n.native == js_fun_apply); /* * This stub should mimic the steps taken by js_fun_apply. Step 1 and part diff --git a/js/src/methodjit/MonoIC.h b/js/src/methodjit/MonoIC.h index 88335833a1d5..9a1ba3d6b57b 100644 --- a/js/src/methodjit/MonoIC.h +++ b/js/src/methodjit/MonoIC.h @@ -156,8 +156,8 @@ struct SetGlobalNameIC : public GlobalNameIC /* SET only. */ ValueRemat vr; /* RHS value. 
*/ - void patchInlineShapeGuard(Repatcher &repatcher, int32 shape); - void patchExtraShapeGuard(Repatcher &repatcher, int32 shape); + void patchInlineShapeGuard(Repatcher &repatcher, const Shape *shape); + void patchExtraShapeGuard(Repatcher &repatcher, const Shape *shape); }; void JS_FASTCALL GetGlobalName(VMFrame &f, ic::GetGlobalNameIC *ic); @@ -235,7 +235,6 @@ struct CallICInfo { uint32 slowJoinOffset : 16; RegisterID funObjReg : 5; - RegisterID funPtrReg : 5; bool hit : 1; bool hasJsFunCheck : 1; bool typeMonitored : 1; diff --git a/js/src/methodjit/NunboxAssembler.h b/js/src/methodjit/NunboxAssembler.h index 8dc2a1fa76c1..f43180e19e58 100644 --- a/js/src/methodjit/NunboxAssembler.h +++ b/js/src/methodjit/NunboxAssembler.h @@ -345,8 +345,8 @@ class NunboxAssembler : public JSC::MacroAssembler loadPtr(payloadOf(privAddr), to); } - void loadObjPrivate(RegisterID base, RegisterID to) { - Address priv(base, offsetof(JSObject, privateData)); + void loadObjPrivate(RegisterID base, RegisterID to, uint32 nfixed) { + Address priv(base, JSObject::getPrivateDataOffset(nfixed)); loadPtr(priv, to); } diff --git a/js/src/methodjit/PolyIC.cpp b/js/src/methodjit/PolyIC.cpp index 167681cbe335..3acdbfab7a5e 100644 --- a/js/src/methodjit/PolyIC.cpp +++ b/js/src/methodjit/PolyIC.cpp @@ -70,51 +70,6 @@ typedef JSC::MacroAssembler::Imm32 Imm32; /* Rough over-estimate of how much memory we need to unprotect. */ static const uint32 INLINE_PATH_LENGTH = 64; -/* Static initializer to prime platforms that use constant offsets for ICs. */ -#ifndef JS_HAS_IC_LABELS -ICOffsetInitializer::ICOffsetInitializer() -{ - { - GetPropLabels &labels = PICInfo::getPropLabels_; -#if defined JS_CPU_X86 - labels.dslotsLoadOffset = -15; - labels.inlineShapeOffset = 6; - labels.stubShapeJumpOffset = 12; - labels.inlineValueLoadOffset = -12; -#endif - } - { - SetPropLabels &labels = PICInfo::setPropLabels_; -#if defined JS_CPU_X86 - labels.inlineShapeDataOffset = 6; - /* Store w/ address offset patch is two movs. */ - labels.inlineShapeJumpOffset = 12; - labels.stubShapeJumpOffset = 12; -#endif - } - { - BindNameLabels &labels = PICInfo::bindNameLabels_; -#if defined JS_CPU_X86 - labels.inlineJumpOffset = 10; - labels.stubJumpOffset = 5; -#endif - } - { - ScopeNameLabels &labels = PICInfo::scopeNameLabels_; -#if defined JS_CPU_X86 - labels.inlineJumpOffset = 5; - labels.stubJumpOffset = 5; -#endif - } -} - -ICOffsetInitializer s_ICOffsetInitializer; -GetPropLabels PICInfo::getPropLabels_; -SetPropLabels PICInfo::setPropLabels_; -BindNameLabels PICInfo::bindNameLabels_; -ScopeNameLabels PICInfo::scopeNameLabels_; -#endif - // Helper class to simplify LinkBuffer usage in PIC stub generators. // This guarantees correct OOM and refcount handling for buffers while they // are instantiated and rooted. 
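[Editor's note — not part of the patch.] The hunks above and below replace the old 32-bit objShape guards with pointer comparisons against the object's lastProperty() (its Shape*): inline paths switch from branch32/Imm32 to branchPtr/ImmPtr, and the repatchers now take a const Shape* instead of an int32. The standalone sketch below is illustrative only; Shape, SimpleObject, and ShapeGuardIC are hypothetical stand-ins (not SpiderMonkey types) meant to show why baking in a shape pointer also pins down the slot layout, which is what lets the add-property stub later in this file drop its explicit capacity check.

    // Minimal sketch, under the assumption that two objects share a Shape
    // pointer only when their property layouts are identical.
    #include <cstdint>
    #include <vector>

    struct Shape {
        uint32_t numFixedSlots; // slots stored inline in the object
        uint32_t slotSpan;      // total slots the layout requires
    };

    struct SimpleObject {
        const Shape *lastProperty_;        // current layout descriptor
        std::vector<double> dynamicSlots_; // sized from the shape's slot span

        const Shape *lastProperty() const { return lastProperty_; }
    };

    // A cached fast path compiled against one specific layout, analogous to
    // patching ImmPtr(obj->lastProperty()) into the inline shape guard.
    struct ShapeGuardIC {
        const Shape *guardShape;
        uint32_t cachedSlotIndex; // slot offset baked in at patch time

        bool tryFastPath(const SimpleObject &obj, double *out) const {
            // Pointer identity on the shape replaces the old 32-bit
            // shape-id compare; on a match, the baked-in slot index is
            // still valid, so no capacity re-check is needed.
            if (obj.lastProperty() != guardShape)
                return false; // fall back to the slow path
            *out = obj.dynamicSlots_[cachedSlotIndex];
            return true;
        }
    };

    int main() {
        Shape layout{4, 6};
        SimpleObject obj{&layout,
                         std::vector<double>(layout.slotSpan - layout.numFixedSlots, 0.0)};
        obj.dynamicSlots_[1] = 42.0;

        ShapeGuardIC ic{&layout, 1};
        double v = 0.0;
        bool hit = ic.tryFastPath(obj, &v); // hits: shape pointer still matches
        return (hit && v == 42.0) ? 0 : 1;
    }

The design point this models is the one stated in the SetPropCompiler comment below: once the guard is on the initial shape itself, the object's fixed-slot count and slot span are implied by that shape, so the stub can compute slot addresses without re-validating capacity.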
@@ -197,6 +152,51 @@ class PICStubCompiler : public BaseCompiler } }; +static bool +GeneratePrototypeGuards(JSContext *cx, Vector &mismatches, Assembler &masm, + JSObject *obj, JSObject *holder, + JSC::MacroAssembler::RegisterID objReg, + JSC::MacroAssembler::RegisterID scratchReg) +{ + typedef JSC::MacroAssembler::Address Address; + typedef JSC::MacroAssembler::AbsoluteAddress AbsoluteAddress; + typedef JSC::MacroAssembler::ImmPtr ImmPtr; + typedef JSC::MacroAssembler::Jump Jump; + + if (obj->hasUncacheableProto()) { + masm.loadPtr(Address(objReg, JSObject::offsetOfType()), scratchReg); + Jump j = masm.branchPtr(Assembler::NotEqual, + Address(scratchReg, offsetof(types::TypeObject, proto)), + ImmPtr(obj->getProto())); + if (!mismatches.append(j)) + return false; + } + + JSObject *pobj = obj->getProto(); + while (pobj != holder) { + if (pobj->hasUncacheableProto()) { + Jump j; + if (pobj->hasSingletonType()) { + types::TypeObject *type = pobj->getType(cx); + j = masm.branchPtr(Assembler::NotEqual, + AbsoluteAddress(&type->proto), + ImmPtr(pobj->getProto()), + scratchReg); + } else { + j = masm.branchPtr(Assembler::NotEqual, + AbsoluteAddress(pobj->addressOfType()), + ImmPtr(pobj->type()), + scratchReg); + } + if (!mismatches.append(j)) + return false; + } + pobj = pobj->getProto(); + } + + return true; +} + class SetPropCompiler : public PICStubCompiler { JSObject *obj; @@ -215,7 +215,7 @@ class SetPropCompiler : public PICStubCompiler SetPropLabels &labels = pic.setPropLabels(); repatcher.repatchLEAToLoadPtr(labels.getDslotsLoad(pic.fastPathRejoin, pic.u.vr)); repatcher.repatch(labels.getInlineShapeData(pic.fastPathStart, pic.shapeGuard), - int32(INVALID_SHAPE)); + NULL); repatcher.relink(labels.getInlineShapeJump(pic.fastPathStart.labelAtOffset(pic.shapeGuard)), pic.slowPathStart); @@ -232,7 +232,7 @@ class SetPropCompiler : public PICStubCompiler SetPropLabels &labels = pic.setPropLabels(); int32 offset; - if (obj->isFixedSlot(shape->slot)) { + if (obj->isFixedSlot(shape->slot())) { CodeLocationInstruction istr = labels.getDslotsLoad(pic.fastPathRejoin, pic.u.vr); repatcher.repatchLoadPtrToLEA(istr); @@ -246,15 +246,14 @@ class SetPropCompiler : public PICStubCompiler int32 diff = int32(JSObject::getFixedSlotOffset(0)) - int32(JSObject::offsetOfSlots()); JS_ASSERT(diff != 0); - offset = (int32(shape->slot) * sizeof(Value)) + diff; + offset = (int32(shape->slot()) * sizeof(Value)) + diff; } else { - offset = obj->dynamicSlotIndex(shape->slot) * sizeof(Value); + offset = obj->dynamicSlotIndex(shape->slot()) * sizeof(Value); } repatcher.repatch(labels.getInlineShapeData(pic.fastPathStart, pic.shapeGuard), - obj->shape()); - repatcher.patchAddressOffsetForValueStore(labels.getInlineValueStore(pic.fastPathRejoin, - pic.u.vr), + obj->lastProperty()); + repatcher.patchAddressOffsetForValueStore(labels.getInlineValueStore(pic.fastPathRejoin), offset, pic.u.vr.isTypeKnown()); pic.inlinePathPatched = true; @@ -284,7 +283,7 @@ class SetPropCompiler : public PICStubCompiler repatcher.relink(label.jumpAtOffset(secondGuardOffset), cs); } - LookupStatus generateStub(uint32 initialShape, const Shape *shape, bool adding) + LookupStatus generateStub(const Shape *initialShape, const Shape *shape, bool adding) { if (hadGC()) return Lookup_Uncacheable; @@ -302,8 +301,8 @@ class SetPropCompiler : public PICStubCompiler } Label start = masm.label(); - Jump shapeGuard = masm.branch32FixedLength(Assembler::NotEqual, pic.shapeReg, - Imm32(initialShape)); + Jump shapeGuard = masm.branchPtr(Assembler::NotEqual, 
pic.shapeReg, + ImmPtr(initialShape)); Label stubShapeJumpLabel = masm.label(); @@ -335,6 +334,11 @@ class SetPropCompiler : public PICStubCompiler JS_ASSERT(shape->hasSlot()); pic.shapeRegHasBaseShape = false; + if (!GeneratePrototypeGuards(cx, otherGuards, masm, obj, NULL, + pic.objReg, pic.shapeReg)) { + return error(); + } + /* Emit shape guards for the object's prototype chain. */ JSObject *proto = obj->getProto(); RegisterID lastReg = pic.objReg; @@ -355,7 +359,7 @@ class SetPropCompiler : public PICStubCompiler * We already know it is a function, so test the payload. */ JS_ASSERT(shape->isMethod()); - JSObject *funobj = &shape->methodObject(); + JSObject *funobj = obj->nativeGetMethod(shape); if (pic.u.vr.isConstant()) { JS_ASSERT(funobj == &pic.u.vr.value().toObject()); } else { @@ -366,56 +370,30 @@ class SetPropCompiler : public PICStubCompiler } } - if (obj->isFixedSlot(shape->slot)) { + if (obj->isFixedSlot(shape->slot())) { Address address(pic.objReg, - JSObject::getFixedSlotOffset(shape->slot)); + JSObject::getFixedSlotOffset(shape->slot())); masm.storeValue(pic.u.vr, address); } else { - /* Check capacity. */ - Address capacity(pic.objReg, offsetof(JSObject, capacity)); - masm.load32(capacity, pic.shapeReg); - Jump overCapacity = masm.branch32(Assembler::LessThanOrEqual, pic.shapeReg, - Imm32(shape->slot)); - if (!slowExits.append(overCapacity)) - return error(); - + /* + * Note: the guard on the initial shape determines the object's + * number of fixed slots and slot span, which in turn determine + * the number of dynamic slots allocated for the object. + * We don't need to check capacity here. + */ masm.loadPtr(Address(pic.objReg, JSObject::offsetOfSlots()), pic.shapeReg); - Address address(pic.shapeReg, obj->dynamicSlotIndex(shape->slot) * sizeof(Value)); + Address address(pic.shapeReg, obj->dynamicSlotIndex(shape->slot()) * sizeof(Value)); masm.storeValue(pic.u.vr, address); } - uint32 newShape = obj->shape(); - JS_ASSERT(newShape != initialShape); + JS_ASSERT(shape == obj->lastProperty()); + JS_ASSERT(shape != initialShape); /* Write the object's new shape. */ - masm.storePtr(ImmPtr(shape), Address(pic.objReg, offsetof(JSObject, lastProp))); - masm.store32(Imm32(newShape), Address(pic.objReg, offsetof(JSObject, objShape))); - - /* If this is a method shape, update the object's flags. */ - if (shape->isMethod()) { - Address flags(pic.objReg, offsetof(JSObject, flags)); - - /* Use shapeReg to load, bitwise-or, and store flags. */ - masm.load32(flags, pic.shapeReg); - masm.or32(Imm32(JSObject::METHOD_BARRIER), pic.shapeReg); - masm.store32(pic.shapeReg, flags); - } + masm.storePtr(ImmPtr(shape), Address(pic.objReg, JSObject::offsetOfShape())); } else if (shape->hasDefaultSetter()) { - Address address = masm.objPropAddress(obj, pic.objReg, shape->slot); - - // If the scope is branded, or has a method barrier. It's now necessary - // to guard that we're not overwriting a function-valued property. 
- if (obj->brandedOrHasMethodBarrier()) { - masm.loadTypeTag(address, pic.shapeReg); - Jump skip = masm.testObject(Assembler::NotEqual, pic.shapeReg); - masm.loadPayload(address, pic.shapeReg); - Jump rebrand = masm.testFunction(Assembler::Equal, pic.shapeReg); - if (!slowExits.append(rebrand)) - return error(); - skip.linkTo(masm.label(), &masm); - pic.shapeRegHasBaseShape = false; - } - + JS_ASSERT(!shape->isMethod()); + Address address = masm.objPropAddress(obj, pic.objReg, shape->slot()); masm.storeValue(pic.u.vr, address); } else { // \ / In general, two function objects with different JSFunctions @@ -426,10 +404,10 @@ class SetPropCompiler : public PICStubCompiler // \===/ 2. arguments and locals have different getters // then we can rely on fun->nargs remaining invariant. JSFunction *fun = obj->asCall().getCalleeFunction(); - uint16 slot = uint16(shape->shortid); + uint16 slot = uint16(shape->shortid()); /* Guard that the call object has a frame. */ - masm.loadObjPrivate(pic.objReg, pic.shapeReg); + masm.loadObjPrivate(pic.objReg, pic.shapeReg, obj->numFixedSlots()); Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg); { @@ -487,9 +465,9 @@ class SetPropCompiler : public PICStubCompiler if (skipOver.isSet()) buffer.link(skipOver.get(), pic.fastPathRejoin); CodeLocationLabel cs = buffer.finalize(f); - JaegerSpew(JSpew_PICs, "generate setprop stub %p %d %d at %p\n", + JaegerSpew(JSpew_PICs, "generate setprop stub %p %p %d at %p\n", (void*)&pic, - initialShape, + (void*)initialShape, pic.stubsGenerated, cs.executableAddress()); @@ -592,12 +570,20 @@ class SetPropCompiler : public PICStubCompiler if (js_IdIsIndex(id, &index)) return disable("index"); - uint32 initialShape = obj->shape(); + /* + * When adding a property we need to check shapes along the entire + * prototype chain to watch for an added setter. + */ + JSObject *proto = obj; + while (proto) { + if (!proto->isNative()) + return disable("non-native proto"); + proto = proto->getProto(); + } - if (!obj->ensureClassReservedSlots(cx)) - return error(); + const Shape *initialShape = obj->lastProperty(); + uint32 slots = obj->numDynamicSlots(); - uint32 slots = obj->numSlots(); uintN flags = 0; PropertyOp getter = clasp->getProperty; @@ -606,11 +592,10 @@ class SetPropCompiler : public PICStubCompiler return disable("can't have method barrier"); JSObject *funobj = &f.regs.sp[-1].toObject(); - if (funobj != funobj->getFunctionPrivate()) + if (funobj->toFunction()->isClonedMethod()) return disable("mismatched function"); flags |= Shape::METHOD; - getter = CastAsPropertyOp(funobj); } /* @@ -625,7 +610,7 @@ class SetPropCompiler : public PICStubCompiler if (!shape) return error(); if (flags & Shape::METHOD) - obj->nativeSetSlot(shape->slot, f.regs.sp[-1]); + obj->nativeSetSlot(shape->slot(), f.regs.sp[-1]); if (monitor.recompiled()) return Lookup_Uncacheable; @@ -655,7 +640,7 @@ class SetPropCompiler : public PICStubCompiler * usually be a slowdown even if there *are* other shapes that * don't realloc. 
*/ - if (obj->numSlots() != slots) + if (obj->numDynamicSlots() != slots) return disable("insufficient slot capacity"); if (pic.typeMonitored && !updateMonitoredTypes()) @@ -669,6 +654,8 @@ class SetPropCompiler : public PICStubCompiler return disable("set method on non-method shape"); if (!shape->writable()) return disable("readonly"); + if (shape->isMethod()) + return disable("method"); if (shape->hasDefaultSetter()) { if (!shape->hasSlot()) @@ -697,8 +684,8 @@ class SetPropCompiler : public PICStubCompiler RecompilationMonitor monitor(cx); JSFunction *fun = obj->asCall().getCalleeFunction(); JSScript *script = fun->script(); - uint16 slot = uint16(shape->shortid); - if (!script->ensureHasTypes(cx, fun)) + uint16 slot = uint16(shape->shortid()); + if (!script->ensureHasTypes(cx)) return error(); { types::AutoEnterTypeInference enter(cx); @@ -714,14 +701,13 @@ class SetPropCompiler : public PICStubCompiler JS_ASSERT(obj == holder); if (!pic.inlinePathPatched && - !obj->brandedOrHasMethodBarrier() && shape->hasDefaultSetter() && !pic.typeMonitored && !obj->isDenseArray()) { return patchInline(shape); } - return generateStub(obj->shape(), shape, false); + return generateStub(obj->lastProperty(), shape, false); } }; @@ -865,8 +851,7 @@ class GetPropCompiler : public PICStubCompiler { GetPropLabels &labels = pic.getPropLabels(); repatcher.repatchLEAToLoadPtr(labels.getDslotsLoad(pic.fastPathRejoin)); - repatcher.repatch(labels.getInlineShapeData(pic.getFastShapeGuard()), - int32(INVALID_SHAPE)); + repatcher.repatch(labels.getInlineShapeData(pic.getFastShapeGuard()), NULL); repatcher.relink(labels.getInlineShapeJump(pic.getFastShapeGuard()), pic.slowPathStart); if (pic.hasTypeCheck()) { @@ -895,7 +880,7 @@ class GetPropCompiler : public PICStubCompiler { Assembler masm; - Jump notArgs = masm.testObjClass(Assembler::NotEqual, pic.objReg, obj->getClass()); + Jump notArgs = masm.guardShape(pic.objReg, obj); masm.load32(Address(pic.objReg, JSObject::getFixedSlotOffset(ArgumentsObject::INITIAL_LENGTH_SLOT)), pic.objReg); masm.move(pic.objReg, pic.shapeReg); @@ -941,7 +926,8 @@ class GetPropCompiler : public PICStubCompiler Jump notArray = masm.testClass(Assembler::NotEqual, pic.shapeReg, &SlowArrayClass); isDense.linkTo(masm.label(), &masm); - masm.load32(Address(pic.objReg, offsetof(JSObject, privateData)), pic.objReg); + masm.loadPtr(Address(pic.objReg, JSObject::offsetOfElements()), pic.objReg); + masm.load32(Address(pic.objReg, ObjectElements::offsetOfLength()), pic.objReg); Jump oob = masm.branch32(Assembler::Above, pic.objReg, Imm32(JSVAL_INT_MAX)); masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg); Jump done = masm.jump(); @@ -976,7 +962,7 @@ class GetPropCompiler : public PICStubCompiler { Assembler masm; - Jump notStringObj = masm.testObjClass(Assembler::NotEqual, pic.objReg, obj->getClass()); + Jump notStringObj = masm.guardShape(pic.objReg, obj); masm.loadPayload(Address(pic.objReg, JSObject::getPrimitiveThisOffset()), pic.objReg); masm.loadPtr(Address(pic.objReg, JSString::offsetOfLengthAndFlags()), pic.objReg); @@ -1054,8 +1040,8 @@ class GetPropCompiler : public PICStubCompiler */ masm.move(ImmPtr(obj), pic.objReg); masm.loadShape(pic.objReg, pic.shapeReg); - Jump shapeMismatch = masm.branch32(Assembler::NotEqual, pic.shapeReg, - Imm32(obj->shape())); + Jump shapeMismatch = masm.branchPtr(Assembler::NotEqual, pic.shapeReg, + ImmPtr(obj->lastProperty())); masm.loadObjProp(obj, pic.objReg, getprop.shape, pic.shapeReg, pic.objReg); Jump done = masm.jump(); @@ -1139,7 +1125,7 @@ class 
GetPropCompiler : public PICStubCompiler GetPropLabels &labels = pic.getPropLabels(); int32 offset; - if (holder->isFixedSlot(shape->slot)) { + if (holder->isFixedSlot(shape->slot())) { CodeLocationInstruction istr = labels.getDslotsLoad(pic.fastPathRejoin); repatcher.repatchLoadPtrToLEA(istr); @@ -1153,12 +1139,12 @@ class GetPropCompiler : public PICStubCompiler int32 diff = int32(JSObject::getFixedSlotOffset(0)) - int32(JSObject::offsetOfSlots()); JS_ASSERT(diff != 0); - offset = (int32(shape->slot) * sizeof(Value)) + diff; + offset = (int32(shape->slot()) * sizeof(Value)) + diff; } else { - offset = holder->dynamicSlotIndex(shape->slot) * sizeof(Value); + offset = holder->dynamicSlotIndex(shape->slot()) * sizeof(Value); } - repatcher.repatch(labels.getInlineShapeData(pic.getFastShapeGuard()), obj->shape()); + repatcher.repatch(labels.getInlineShapeData(pic.getFastShapeGuard()), obj->lastProperty()); repatcher.patchAddressOffsetForValueLoad(labels.getValueLoad(pic.fastPathRejoin), offset); pic.inlinePathPatched = true; @@ -1228,7 +1214,7 @@ class GetPropCompiler : public PICStubCompiler masm.restoreStackBase(); masm.setupABICall(Registers::NormalCall, 4); masm.storeArg(3, vpReg); - masm.storeArg(2, ImmPtr((void *) JSID_BITS(SHAPE_USERID(shape)))); + masm.storeArg(2, ImmPtr((void *) JSID_BITS(shape->getUserId()))); masm.storeArg(1, holdObjReg); masm.storeArg(0, cxReg); @@ -1265,7 +1251,9 @@ class GetPropCompiler : public PICStubCompiler bool setStubShapeOffset = true; if (obj->isDenseArray()) { start = masm.label(); - shapeGuardJump = masm.testObjClass(Assembler::NotEqual, pic.objReg, obj->getClass()); + shapeGuardJump = masm.branchPtr(Assembler::NotEqual, + Address(pic.objReg, JSObject::offsetOfShape()), + ImmPtr(obj->lastProperty())); /* * No need to assert validity of GETPROP_STUB_SHAPE_JUMP in this case: @@ -1281,8 +1269,8 @@ class GetPropCompiler : public PICStubCompiler } start = masm.label(); - shapeGuardJump = masm.branch32FixedLength(Assembler::NotEqual, pic.shapeReg, - Imm32(obj->shape())); + shapeGuardJump = masm.branchPtr(Assembler::NotEqual, pic.shapeReg, + ImmPtr(obj->lastProperty())); } Label stubShapeJumpLabel = masm.label(); @@ -1291,6 +1279,11 @@ class GetPropCompiler : public PICStubCompiler RegisterID holderReg = pic.objReg; if (obj != holder) { + if (!GeneratePrototypeGuards(cx, shapeMismatches, masm, obj, holder, + pic.objReg, pic.shapeReg)) { + return error(); + } + // Bake in the holder identity. Careful not to clobber |objReg|, since we can't remat it. holderReg = pic.shapeReg; masm.move(ImmPtr(holder), holderReg); @@ -1442,24 +1435,18 @@ class ScopeNameCompiler : public PICStubCompiler return disable("non-cacheable scope chain object"); JS_ASSERT(tobj->isNative()); - if (tobj != scopeChain) { - /* scopeChain will never be NULL, but parents can be NULL. */ - Jump j = masm.branchTestPtr(Assembler::Zero, pic.objReg, pic.objReg); - if (!fails.append(j)) - return error(); - } - /* Guard on intervening shapes. */ masm.loadShape(pic.objReg, pic.shapeReg); - Jump j = masm.branch32(Assembler::NotEqual, pic.shapeReg, Imm32(tobj->shape())); + Jump j = masm.branchPtr(Assembler::NotEqual, pic.shapeReg, + ImmPtr(tobj->lastProperty())); if (!fails.append(j)) return error(); /* Load the next link in the scope chain. 
*/ - Address parent(pic.objReg, offsetof(JSObject, parent)); - masm.loadPtr(parent, pic.objReg); + Address parent(pic.objReg, JSObject::offsetOfInternalScopeChain()); + masm.loadPayload(parent, pic.objReg); - tobj = tobj->getParent(); + tobj = tobj->internalScopeChain(); } if (tobj != getprop.holder) @@ -1525,7 +1512,8 @@ class ScopeNameCompiler : public PICStubCompiler if (pic.kind == ic::PICInfo::NAME || pic.kind == ic::PICInfo::CALLNAME) finalNull = masm.branchTestPtr(Assembler::Zero, pic.objReg, pic.objReg); masm.loadShape(pic.objReg, pic.shapeReg); - Jump finalShape = masm.branch32(Assembler::NotEqual, pic.shapeReg, Imm32(getprop.holder->shape())); + Jump finalShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg, + ImmPtr(getprop.holder->lastProperty())); masm.loadObjProp(obj, pic.objReg, getprop.shape, pic.shapeReg, pic.objReg); @@ -1618,7 +1606,8 @@ class ScopeNameCompiler : public PICStubCompiler if (pic.kind == ic::PICInfo::NAME || pic.kind == ic::PICInfo::CALLNAME) finalNull = masm.branchTestPtr(Assembler::Zero, pic.objReg, pic.objReg); masm.loadShape(pic.objReg, pic.shapeReg); - Jump finalShape = masm.branch32(Assembler::NotEqual, pic.shapeReg, Imm32(getprop.holder->shape())); + Jump finalShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg, + ImmPtr(getprop.holder->lastProperty())); /* * For CALLNAME we have to store the this-value. Since we guarded on @@ -1633,10 +1622,10 @@ class ScopeNameCompiler : public PICStubCompiler } /* Get callobj's stack frame. */ - masm.loadObjPrivate(pic.objReg, pic.shapeReg); + masm.loadObjPrivate(pic.objReg, pic.shapeReg, getprop.holder->numFixedSlots()); JSFunction *fun = getprop.holder->asCall().getCalleeFunction(); - uint16 slot = uint16(shape->shortid); + uint16 slot = uint16(shape->shortid()); Jump skipOver; Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg); @@ -1736,7 +1725,7 @@ class ScopeNameCompiler : public PICStubCompiler if (status != Lookup_Cacheable) return status; - if (!obj->getParent()) + if (obj->isGlobal()) return generateGlobalStub(obj); return disable("scope object not handled yet"); @@ -1831,25 +1820,22 @@ class BindNameCompiler : public PICStubCompiler /* Guard on the shape of the scope chain. */ masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg); masm.loadShape(pic.objReg, pic.shapeReg); - Jump firstShape = masm.branch32(Assembler::NotEqual, pic.shapeReg, - Imm32(scopeChain->shape())); + Jump firstShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg, + ImmPtr(scopeChain->lastProperty())); /* Walk up the scope chain. 
*/ JSObject *tobj = scopeChain; - Address parent(pic.objReg, offsetof(JSObject, parent)); + Address parent(pic.objReg, JSObject::offsetOfInternalScopeChain()); while (tobj && tobj != obj) { if (!IsCacheableNonGlobalScope(tobj)) return disable("non-cacheable obj in scope chain"); - masm.loadPtr(parent, pic.objReg); - Jump nullTest = masm.branchTestPtr(Assembler::Zero, pic.objReg, pic.objReg); - if (!fails.append(nullTest)) - return error(); + masm.loadPayload(parent, pic.objReg); masm.loadShape(pic.objReg, pic.shapeReg); - Jump shapeTest = masm.branch32(Assembler::NotEqual, pic.shapeReg, - Imm32(tobj->shape())); + Jump shapeTest = masm.branchPtr(Assembler::NotEqual, pic.shapeReg, + ImmPtr(tobj->lastProperty())); if (!fails.append(shapeTest)) return error(); - tobj = tobj->getParent(); + tobj = tobj->internalScopeChain(); } if (tobj != obj) return disable("indirect hit"); @@ -1893,7 +1879,6 @@ class BindNameCompiler : public PICStubCompiler JSObject *update() { - JS_ASSERT(scopeChain->getParent()); RecompilationMonitor monitor(cx); JSObject *obj = js_FindIdentifierBase(cx, scopeChain, ATOM_TO_JSID(atom)); @@ -2110,17 +2095,8 @@ ic::CallProp(VMFrame &f, ic::PICInfo *pic) JSAtom *atom; JS_PROPERTY_CACHE(cx).test(cx, f.pc(), aobj, obj2, entry, atom); if (!atom) { - if (entry->vword.isFunObj()) { - rval.setObject(entry->vword.toFunObj()); - } else if (entry->vword.isSlot()) { - uint32 slot = entry->vword.toSlot(); - rval = obj2->nativeGetSlot(slot); - } else { - JS_ASSERT(entry->vword.isShape()); - const Shape *shape = entry->vword.toShape(); - NATIVE_GET(cx, &objv.toObject(), obj2, shape, JSGET_NO_METHOD_BARRIER, &rval, - THROW()); - } + NATIVE_GET(cx, &objv.toObject(), obj2, entry->prop, JSGET_NO_METHOD_BARRIER, &rval, + THROW()); /* * Adjust the stack to reflect the height after the GETPROP, here and * below. Getter hook ICs depend on this to know which value of sp they @@ -2406,8 +2382,8 @@ GetElementIC::purge(Repatcher &repatcher) // Repatch the inline jumps. if (inlineTypeGuardPatched) repatcher.relink(fastPathStart.jumpAtOffset(inlineTypeGuard), slowPathStart); - if (inlineClaspGuardPatched) - repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), slowPathStart); + if (inlineShapeGuardPatched) + repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), slowPathStart); if (slowCallPatched) { if (op == JSOP_GETELEM) { @@ -2468,19 +2444,19 @@ GetElementIC::attachGetProp(VMFrame &f, JSObject *obj, const Value &v, jsid id, if (!idRemat.isConstant()) atomIdGuard = masm.branchPtr(Assembler::NotEqual, idRemat.dataReg(), ImmPtr(v.toString())); - // Guard on the base shape (or in the dense array case, the clasp). - Jump shapeGuard; - if (obj->isDenseArray()) { - shapeGuard = masm.testObjClass(Assembler::NotEqual, objReg, obj->getClass()); - } else { - shapeGuard = masm.branch32(Assembler::NotEqual, typeReg, Imm32(obj->shape())); - } + // Guard on the base shape. + Jump shapeGuard = masm.branchPtr(Assembler::NotEqual, typeReg, ImmPtr(obj->lastProperty())); + + Vector otherGuards(cx); // Guard on the prototype, if applicable. MaybeJump protoGuard; JSObject *holder = getprop.holder; RegisterID holderReg = objReg; if (obj != holder) { + if (!GeneratePrototypeGuards(cx, otherGuards, masm, obj, holder, objReg, typeReg)) + return error(cx); + // Bake in the holder identity. Careful not to clobber |objReg|, since we can't remat it. 
holderReg = typeReg; masm.move(ImmPtr(holder), holderReg); @@ -2519,19 +2495,21 @@ GetElementIC::attachGetProp(VMFrame &f, JSObject *obj, const Value &v, jsid id, buffer.maybeLink(atomTypeGuard, slowPathStart); buffer.link(shapeGuard, slowPathStart); buffer.maybeLink(protoGuard, slowPathStart); + for (Jump *pj = otherGuards.begin(); pj != otherGuards.end(); ++pj) + buffer.link(*pj, slowPathStart); buffer.link(done, fastPathRejoin); CodeLocationLabel cs = buffer.finalize(f); #if DEBUG char *chars = DeflateString(cx, v.toString()->getChars(cx), v.toString()->length()); - JaegerSpew(JSpew_PICs, "generated %s stub at %p for atom %p (\"%s\") shape 0x%x (%s: %d)\n", + JaegerSpew(JSpew_PICs, "generated %s stub at %p for atom %p (\"%s\") shape %p (%s: %d)\n", js_CodeName[op], cs.executableAddress(), (void*)JSID_TO_ATOM(id), chars, - holder->shape(), cx->fp()->script()->filename, CurrentLine(cx)); + (void*)holder->lastProperty(), cx->fp()->script()->filename, CurrentLine(cx)); cx->free_(chars); #endif // Update the inline guards, if needed. - if (shouldPatchInlineTypeGuard() || shouldPatchUnconditionalClaspGuard()) { + if (shouldPatchInlineTypeGuard() || shouldPatchUnconditionalShapeGuard()) { Repatcher repatcher(cx->fp()->jit()); if (shouldPatchInlineTypeGuard()) { @@ -2544,15 +2522,15 @@ GetElementIC::attachGetProp(VMFrame &f, JSObject *obj, const Value &v, jsid id, inlineTypeGuardPatched = true; } - if (shouldPatchUnconditionalClaspGuard()) { - // The clasp guard is unconditional, meaning there is no type + if (shouldPatchUnconditionalShapeGuard()) { + // The shape guard is unconditional, meaning there is no type // check. This is the first stub, so it has to be patched. Note - // that it is wrong to patch the inline clasp guard otherwise, + // that it is wrong to patch the inline shape guard otherwise, // because it follows an integer-id guard. JS_ASSERT(!hasInlineTypeGuard()); - repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), cs); - inlineClaspGuardPatched = true; + repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs); + inlineShapeGuardPatched = true; } } @@ -2594,10 +2572,7 @@ GetElementIC::attachGetProp(VMFrame &f, JSObject *obj, const Value &v, jsid id, disable(cx, "max stubs reached"); // Finally, fetch the value to avoid redoing the property lookup. 
- if (shape->isMethod()) - *vp = ObjectValue(shape->methodObject()); - else - *vp = holder->getSlot(shape->slot); + *vp = holder->getSlot(shape->slot()); return Lookup_Cacheable; } @@ -2617,7 +2592,7 @@ GetElementIC::attachArguments(VMFrame &f, JSObject *obj, const Value &v, jsid id Assembler masm; - Jump claspGuard = masm.testObjClass(Assembler::NotEqual, objReg, obj->getClass()); + Jump shapeGuard = masm.testObjClass(Assembler::NotEqual, objReg, typeReg, obj->getClass()); masm.move(objReg, typeReg); masm.load32(Address(objReg, JSObject::getFixedSlotOffset(ArgumentsObject::INITIAL_LENGTH_SLOT)), @@ -2633,7 +2608,7 @@ GetElementIC::attachArguments(VMFrame &f, JSObject *obj, const Value &v, jsid id outOfBounds = masm.branch32(Assembler::BelowOrEqual, objReg, idRemat.dataReg()); } - masm.loadPayload(Address(typeReg, JSObject::getFixedSlotOffset(ArgumentsObject::DATA_SLOT)), objReg); + masm.loadPrivate(Address(typeReg, JSObject::getFixedSlotOffset(ArgumentsObject::DATA_SLOT)), objReg); if (idRemat.isConstant()) { Address slot(objReg, offsetof(ArgumentsData, slots) + v.toInt32() * sizeof(Value)); masm.loadTypeTag(slot, objReg); @@ -2644,8 +2619,8 @@ GetElementIC::attachArguments(VMFrame &f, JSObject *obj, const Value &v, jsid id } Jump holeCheck = masm.branchPtr(Assembler::Equal, objReg, ImmType(JSVAL_TYPE_MAGIC)); - Address privateData(typeReg, offsetof(JSObject, privateData)); - Jump liveArguments = masm.branchPtr(Assembler::NotEqual, privateData, ImmPtr(0)); + masm.loadPrivate(Address(typeReg, JSObject::getFixedSlotOffset(ArgumentsObject::STACK_FRAME_SLOT)), objReg); + Jump liveArguments = masm.branchPtr(Assembler::NotEqual, objReg, ImmPtr(0)); masm.loadPrivate(Address(typeReg, JSObject::getFixedSlotOffset(ArgumentsObject::DATA_SLOT)), objReg); @@ -2662,7 +2637,7 @@ GetElementIC::attachArguments(VMFrame &f, JSObject *obj, const Value &v, jsid id liveArguments.linkTo(masm.label(), &masm); - masm.loadPtr(privateData, typeReg); + masm.move(objReg, typeReg); Address fun(typeReg, StackFrame::offsetOfExec()); masm.loadPtr(fun, objReg); @@ -2723,7 +2698,7 @@ GetElementIC::attachArguments(VMFrame &f, JSObject *obj, const Value &v, jsid id if (!buffer.verifyRange(cx->fp()->jit())) return disable(cx, "code memory is out of range"); - buffer.link(claspGuard, slowPathStart); + buffer.link(shapeGuard, slowPathStart); buffer.link(overridden, slowPathStart); buffer.link(outOfBounds, slowPathStart); buffer.link(holeCheck, slowPathStart); @@ -2735,12 +2710,12 @@ GetElementIC::attachArguments(VMFrame &f, JSObject *obj, const Value &v, jsid id JaegerSpew(JSpew_PICs, "generated getelem arguments stub at %p\n", cs.executableAddress()); Repatcher repatcher(cx->fp()->jit()); - repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), cs); + repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs); - JS_ASSERT(!shouldPatchUnconditionalClaspGuard()); - JS_ASSERT(!inlineClaspGuardPatched); + JS_ASSERT(!shouldPatchUnconditionalShapeGuard()); + JS_ASSERT(!inlineShapeGuardPatched); - inlineClaspGuardPatched = true; + inlineShapeGuardPatched = true; stubsGenerated++; if (stubsGenerated == MAX_GETELEM_IC_STUBS) @@ -2766,14 +2741,14 @@ GetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, const Value &v, jsid i if (op == JSOP_CALLELEM) return disable(cx, "typed array with call"); - // The fast-path guarantees that after the dense clasp guard, the type is + // The fast-path guarantees that after the dense shape guard, the type is // known to be int32, either via type inference or the inline type check. 
JS_ASSERT(hasInlineTypeGuard() || idRemat.knownType() == JSVAL_TYPE_INT32); Assembler masm; - // Guard on this typed array's clasp. - Jump claspGuard = masm.testObjClass(Assembler::NotEqual, objReg, obj->getClass()); + // Guard on this typed array's shape/class. + Jump shapeGuard = masm.guardShape(objReg, obj); // Bounds check. Jump outOfBounds; @@ -2807,21 +2782,21 @@ GetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, const Value &v, jsid i if (!buffer.verifyRange(cx->fp()->jit())) return disable(cx, "code memory is out of range"); - buffer.link(claspGuard, slowPathStart); + buffer.link(shapeGuard, slowPathStart); buffer.link(outOfBounds, slowPathStart); buffer.link(done, fastPathRejoin); CodeLocationLabel cs = buffer.finalizeCodeAddendum(); JaegerSpew(JSpew_PICs, "generated getelem typed array stub at %p\n", cs.executableAddress()); - // If we can generate a typed array stub, the clasp guard is conditional. + // If we can generate a typed array stub, the shape guard is conditional. // Also, we only support one typed array. - JS_ASSERT(!shouldPatchUnconditionalClaspGuard()); - JS_ASSERT(!inlineClaspGuardPatched); + JS_ASSERT(!shouldPatchUnconditionalShapeGuard()); + JS_ASSERT(!inlineShapeGuardPatched); Repatcher repatcher(cx->fp()->jit()); - repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), cs); - inlineClaspGuardPatched = true; + repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs); + inlineShapeGuardPatched = true; stubsGenerated++; @@ -2852,6 +2827,9 @@ GetElementIC::update(VMFrame &f, JSObject *obj, const Value &v, jsid id, Value * if (v.isString() && js_CheckForStringIndex(id) == id) return attachGetProp(f, obj, v, id, vp); + if (obj->isArguments()) + return attachArguments(f, obj, v, id, vp); + #if defined JS_METHODJIT_TYPED_ARRAY /* * Typed array ICs can make stub calls, and need to know which registers @@ -2998,8 +2976,8 @@ void SetElementIC::purge(Repatcher &repatcher) { // Repatch the inline jumps. - if (inlineClaspGuardPatched) - repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), slowPathStart); + if (inlineShapeGuardPatched) + repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), slowPathStart); if (inlineHoleGuardPatched) repatcher.relink(fastPathStart.jumpAtOffset(inlineHoleGuard), slowPathStart); @@ -3021,18 +2999,17 @@ SetElementIC::attachHoleStub(VMFrame &f, JSObject *obj, int32 keyval) // We may have failed a capacity check instead of a dense array check. // However we should still build the IC in this case, since it could - // be in a loop that is filling in the array. We can assert, however, - // that either we're in capacity or there's a hole - guaranteed by - // the fast path. - JS_ASSERT((jsuint)keyval >= obj->getDenseArrayInitializedLength() || - obj->getDenseArrayElement(keyval).isMagic(JS_ARRAY_HOLE)); + // be in a loop that is filling in the array. if (js_PrototypeHasIndexedProperties(cx, obj)) return disable(cx, "prototype has indexed properties"); Assembler masm; - Vector fails(cx); + Vector fails(cx); + + if (!GeneratePrototypeGuards(cx, fails, masm, obj, NULL, objReg, objReg)) + return error(cx); // Test for indexed properties in Array.prototype. We test each shape // along the proto chain. This affords us two optimizations: @@ -3051,27 +3028,31 @@ SetElementIC::attachHoleStub(VMFrame &f, JSObject *obj, int32 keyval) // Restore |obj|. masm.rematPayload(StateRemat::FromInt32(objRemat), objReg); - // Guard against negative indices. 
- MaybeJump keyGuard; - if (!hasConstantKey) - keyGuard = masm.branch32(Assembler::LessThan, keyReg, Imm32(0)); + // Load the elements. + masm.loadPtr(Address(objReg, JSObject::offsetOfElements()), objReg); - // Update the array length if necessary. - Jump skipUpdate; - Address arrayLength(objReg, offsetof(JSObject, privateData)); - if (hasConstantKey) { - skipUpdate = masm.branch32(Assembler::Above, arrayLength, Imm32(keyValue)); - masm.store32(Imm32(keyValue + 1), arrayLength); - } else { - skipUpdate = masm.branch32(Assembler::Above, arrayLength, keyReg); - masm.add32(Imm32(1), keyReg); - masm.store32(keyReg, arrayLength); - masm.sub32(Imm32(1), keyReg); - } - skipUpdate.linkTo(masm.label(), &masm); + Int32Key key = hasConstantKey ? Int32Key::FromConstant(keyValue) : Int32Key::FromRegister(keyReg); + + // Guard that the initialized length is being updated exactly. + fails.append(masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(), + objReg, key, Assembler::NotEqual)); + + // Check the array capacity. + fails.append(masm.guardArrayExtent(ObjectElements::offsetOfCapacity(), + objReg, key, Assembler::BelowOrEqual)); + + masm.bumpKey(key, 1); + + // Update the length and initialized length. + masm.storeKey(key, Address(objReg, ObjectElements::offsetOfInitializedLength())); + Jump lengthGuard = masm.guardArrayExtent(ObjectElements::offsetOfLength(), + objReg, key, Assembler::AboveOrEqual); + masm.storeKey(key, Address(objReg, ObjectElements::offsetOfLength())); + lengthGuard.linkTo(masm.label(), &masm); + + masm.bumpKey(key, -1); // Store the value back. - masm.loadPtr(Address(objReg, JSObject::offsetOfSlots()), objReg); if (hasConstantKey) { Address slot(objReg, keyValue * sizeof(Value)); masm.storeValue(vr, slot); @@ -3114,14 +3095,17 @@ SetElementIC::attachHoleStub(VMFrame &f, JSObject *obj, int32 keyval) LookupStatus SetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, int32 key) { - // Right now, only one clasp guard extension is supported. - JS_ASSERT(!inlineClaspGuardPatched); + // Right now, only one shape guard extension is supported. + JS_ASSERT(!inlineShapeGuardPatched); Assembler masm; JSContext *cx = f.cx; - // Guard on this typed array's clasp. - Jump claspGuard = masm.testObjClass(Assembler::NotEqual, objReg, obj->getClass()); + // Restore |obj|. + masm.rematPayload(StateRemat::FromInt32(objRemat), objReg); + + // Guard on this typed array's shape. + Jump shapeGuard = masm.guardShape(objReg, obj); // Bounds check. Jump outOfBounds; @@ -3131,9 +3115,6 @@ SetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, int32 key) else outOfBounds = masm.branch32(Assembler::BelowOrEqual, typedArrayLength, keyReg); - // Restore |obj|. - masm.rematPayload(StateRemat::FromInt32(objRemat), objReg); - // Load the array's packed data vector. masm.loadPtr(Address(objReg, TypedArray::dataOffset()), objReg); @@ -3176,7 +3157,7 @@ SetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, int32 key) return disable(cx, "code memory is out of range"); // Note that the out-of-bounds path simply does nothing. 
- buffer.link(claspGuard, slowPathStart); + buffer.link(shapeGuard, slowPathStart); buffer.link(outOfBounds, fastPathRejoin); buffer.link(done, fastPathRejoin); masm.finalize(buffer); @@ -3185,8 +3166,8 @@ SetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, int32 key) JaegerSpew(JSpew_PICs, "generated setelem typed array stub at %p\n", cs.executableAddress()); Repatcher repatcher(cx->fp()->jit()); - repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), cs); - inlineClaspGuardPatched = true; + repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs); + inlineShapeGuardPatched = true; stubsGenerated++; diff --git a/js/src/methodjit/PolyIC.h b/js/src/methodjit/PolyIC.h index f54170241dbd..3e9abd20c4a4 100644 --- a/js/src/methodjit/PolyIC.h +++ b/js/src/methodjit/PolyIC.h @@ -233,22 +233,22 @@ struct GetElementIC : public BasePolyIC { // This is only set if hasInlineTypeCheck() is true. unsigned inlineTypeGuard : 8; - // Offset from the fast path to the inline clasp guard. This is always + // Offset from the fast path to the inline shape guard. This is always // set; if |id| is known to not be int32, then it's an unconditional // jump to the slow path. - unsigned inlineClaspGuard : 8; + unsigned inlineShapeGuard : 8; // This is usable if hasInlineTypeGuard() returns true, which implies // that a dense array fast path exists. The inline type guard serves as // the head of the chain of all string-based element stubs. bool inlineTypeGuardPatched : 1; - // This is always usable, and specifies whether the inline clasp guard + // This is always usable, and specifies whether the inline shape guard // has been patched. If hasInlineTypeGuard() is true, it guards against // a dense array, and guarantees the inline type guard has passed. - // Otherwise, there is no inline type guard, and the clasp guard is just + // Otherwise, there is no inline type guard, and the shape guard is just // an unconditional jump. - bool inlineClaspGuardPatched : 1; + bool inlineShapeGuardPatched : 1; //////////////////////////////////////////// // State for string-based property stubs. // @@ -282,18 +282,18 @@ struct GetElementIC : public BasePolyIC { bool shouldPatchInlineTypeGuard() { return hasInlineTypeGuard() && !inlineTypeGuardPatched; } - bool shouldPatchUnconditionalClaspGuard() { - // The clasp guard is only unconditional if the type is known to not + bool shouldPatchUnconditionalShapeGuard() { + // The shape guard is only unconditional if the type is known to not // be an int32. if (idRemat.isTypeKnown() && idRemat.knownType() != JSVAL_TYPE_INT32) - return !inlineClaspGuardPatched; + return !inlineShapeGuardPatched; return false; } void reset() { BasePolyIC::reset(); inlineTypeGuardPatched = false; - inlineClaspGuardPatched = false; + inlineShapeGuardPatched = false; typeRegHasBaseShape = false; hasLastStringStub = false; } @@ -323,11 +323,11 @@ struct SetElementIC : public BaseIC { // Information on how to rematerialize |objReg|. int32 objRemat : MIN_STATE_REMAT_BITS; - // Offset from the start of the fast path to the inline clasp guard. - unsigned inlineClaspGuard : 6; + // Offset from the start of the fast path to the inline shape guard. + unsigned inlineShapeGuard : 6; - // True if the clasp guard has been patched; false otherwise. - bool inlineClaspGuardPatched : 1; + // True if the shape guard has been patched; false otherwise. + bool inlineShapeGuardPatched : 1; // Offset from the start of the fast path to the inline hole guard. 
unsigned inlineHoleGuard : 8; @@ -361,7 +361,7 @@ struct SetElementIC : public BaseIC { if (execPool != NULL) execPool->release(); execPool = NULL; - inlineClaspGuardPatched = false; + inlineShapeGuardPatched = false; inlineHoleGuardPatched = false; } void purge(Repatcher &repatcher); @@ -490,12 +490,6 @@ struct PICInfo : public BasePolyIC { return !hasTypeCheck(); } -#if !defined JS_HAS_IC_LABELS - static GetPropLabels getPropLabels_; - static SetPropLabels setPropLabels_; - static BindNameLabels bindNameLabels_; - static ScopeNameLabels scopeNameLabels_; -#else union { GetPropLabels getPropLabels_; SetPropLabels setPropLabels_; @@ -518,7 +512,6 @@ struct PICInfo : public BasePolyIC { JS_ASSERT(kind == NAME || kind == CALLNAME || kind == XNAME); scopeNameLabels_ = labels; } -#endif GetPropLabels &getPropLabels() { JS_ASSERT(isGet()); diff --git a/js/src/methodjit/PunboxAssembler.h b/js/src/methodjit/PunboxAssembler.h index 9b59930a9520..ecd1322aee7c 100644 --- a/js/src/methodjit/PunboxAssembler.h +++ b/js/src/methodjit/PunboxAssembler.h @@ -274,8 +274,8 @@ class PunboxAssembler : public JSC::MacroAssembler lshiftPtr(Imm32(1), to); } - void loadObjPrivate(RegisterID base, RegisterID to) { - Address priv(base, offsetof(JSObject, privateData)); + void loadObjPrivate(RegisterID base, RegisterID to, uint32 nfixed) { + Address priv(base, JSObject::getPrivateDataOffset(nfixed)); loadPtr(priv, to); } diff --git a/js/src/methodjit/StubCalls-inl.h b/js/src/methodjit/StubCalls-inl.h index 967296743d3c..e59f8d8177df 100644 --- a/js/src/methodjit/StubCalls-inl.h +++ b/js/src/methodjit/StubCalls-inl.h @@ -73,8 +73,8 @@ ReportAtomNotDefined(JSContext *cx, JSAtom *atom) #define NATIVE_SET(cx,obj,shape,entry,strict,vp) \ JS_BEGIN_MACRO \ if (shape->hasDefaultSetter() && \ - (shape)->slot != SHAPE_INVALID_SLOT && \ - !(obj)->brandedOrHasMethodBarrier()) { \ + (shape)->hasSlot() && \ + !(shape)->isMethod()) { \ /* Fast path for, e.g., plain Object instance properties. */ \ (obj)->nativeSetSlotWithType(cx, shape, *vp); \ } else { \ @@ -87,10 +87,10 @@ ReportAtomNotDefined(JSContext *cx, JSAtom *atom) JS_BEGIN_MACRO \ if (shape->isDataDescriptor() && shape->hasDefaultGetter()) { \ /* Fast path for Object instance properties. */ \ - JS_ASSERT((shape)->slot != SHAPE_INVALID_SLOT || \ + JS_ASSERT((shape)->slot() != SHAPE_INVALID_SLOT || \ !shape->hasDefaultSetter()); \ - if (((shape)->slot != SHAPE_INVALID_SLOT)) \ - *(vp) = (pobj)->nativeGetSlot((shape)->slot); \ + if (((shape)->slot() != SHAPE_INVALID_SLOT)) \ + *(vp) = (pobj)->nativeGetSlot((shape)->slot()); \ else \ (vp)->setUndefined(); \ } else { \ diff --git a/js/src/methodjit/StubCalls.cpp b/js/src/methodjit/StubCalls.cpp index 9b79ab2671d9..f99509f72a9e 100644 --- a/js/src/methodjit/StubCalls.cpp +++ b/js/src/methodjit/StubCalls.cpp @@ -89,7 +89,7 @@ stubs::BindName(VMFrame &f) PropertyCacheEntry *entry; /* Fast-path should have caught this. See comment in interpreter. */ - JS_ASSERT(f.fp()->scopeChain().getParent()); + JS_ASSERT(!f.fp()->scopeChain().isGlobal()); JSAtom *atom; JSObject *obj2; @@ -164,89 +164,30 @@ stubs::SetName(VMFrame &f, JSAtom *origAtom) * know that the entry applies to regs.pc and that obj's shape * matches. * - * The entry predicts either a new property to be added directly to - * obj by this set, or on an existing "own" property, or on a - * prototype property that has a setter. + * The entry predicts a set either an existing "own" property, or + * on a prototype property that has a setter. 
*/ - const Shape *shape = entry->vword.toShape(); + const Shape *shape = entry->prop; JS_ASSERT_IF(shape->isDataDescriptor(), shape->writable()); - JS_ASSERT_IF(shape->hasSlot(), entry->vcapTag() == 0); + JS_ASSERT_IF(shape->hasSlot(), !entry->vindex); - /* - * Fastest path: check whether obj already has the cached shape and - * call NATIVE_SET and break to get out of the do-while(0). But we - * can call NATIVE_SET only for a direct or proto-setter hit. - */ - if (!entry->adding()) { - if (entry->vcapTag() == 0 || - ((obj2 = obj->getProto()) && obj2->shape() == entry->vshape())) - { + if (entry->vindex == 0 || + ((obj2 = obj->getProto()) && obj2->lastProperty() == entry->pshape)) { #ifdef DEBUG - if (entry->directHit()) { - JS_ASSERT(obj->nativeContains(cx, *shape)); - } else { - JS_ASSERT(obj2->nativeContains(cx, *shape)); - JS_ASSERT(entry->vcapTag() == 1); - JS_ASSERT(entry->kshape != entry->vshape()); - JS_ASSERT(!shape->hasSlot()); - } + if (entry->directHit()) { + JS_ASSERT(obj->nativeContains(cx, *shape)); + } else { + JS_ASSERT(obj2->nativeContains(cx, *shape)); + JS_ASSERT(entry->vindex == 1); + JS_ASSERT(entry->kshape != entry->pshape); + JS_ASSERT(!shape->hasSlot()); + } #endif - PCMETER(cache->pchits++); - PCMETER(cache->setpchits++); - NATIVE_SET(cx, obj, shape, entry, strict, &rval); - break; - } - } else { - JS_ASSERT(obj->isExtensible()); - - if (obj->nativeEmpty()) { - if (!obj->ensureClassReservedSlotsForEmptyObject(cx)) - THROW(); - } - - uint32 slot; - if (shape->previous() == obj->lastProperty() && - entry->vshape() == cx->runtime->protoHazardShape && - shape->hasDefaultSetter() && - obj->getClass()->addProperty == JS_PropertyStub) { - slot = shape->slot; - JS_ASSERT(slot == obj->slotSpan()); - - /* - * Fast path: adding a plain old property that was once at - * the frontier of the property tree, whose slot is next to - * claim among the already-allocated slots in obj, where - * shape->table has not been created yet. - */ - PCMETER(cache->pchits++); - PCMETER(cache->addpchits++); - - if (slot < obj->numSlots()) { - JS_ASSERT(obj->getSlot(slot).isUndefined()); - } else { - if (!obj->allocSlot(cx, &slot)) - THROW(); - JS_ASSERT(slot == shape->slot); - } - - /* Simply extend obj's property tree path with shape! */ - obj->extend(cx, shape); - - /* - * No method change check here because here we are adding a - * new property, not updating an existing slot's value that - * might contain a method of a branded shape. - */ - obj->nativeSetSlotWithType(cx, shape, rval); - - /* - * Purge the property cache of the id we may have just - * shadowed in obj's scope and proto chains. 
- */ - js_PurgeScopeChain(cx, obj, shape->propid); - break; - } + PCMETER(cache->pchits++); + PCMETER(cache->setpchits++); + NATIVE_SET(cx, obj, shape, entry, strict, &rval); + break; } PCMETER(cache->setpcmisses++); @@ -340,7 +281,6 @@ NameOp(VMFrame &f, JSObject *obj, bool callname) { JSContext *cx = f.cx; - const Shape *shape; Value rval; jsid id; @@ -350,17 +290,7 @@ NameOp(VMFrame &f, JSObject *obj, bool callname) JSAtom *atom; JS_PROPERTY_CACHE(cx).test(cx, f.pc(), obj, obj2, entry, atom); if (!atom) { - if (entry->vword.isFunObj()) { - rval.setObject(entry->vword.toFunObj()); - } else if (entry->vword.isSlot()) { - uintN slot = entry->vword.toSlot(); - rval = obj2->nativeGetSlot(slot); - } else { - JS_ASSERT(entry->vword.isShape()); - shape = entry->vword.toShape(); - NATIVE_GET(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, &rval, return NULL); - } - + NATIVE_GET(cx, obj, obj2, entry->prop, JSGET_METHOD_BARRIER, &rval, return NULL); JS_ASSERT(obj->isGlobal() || IsCacheableNonGlobalScope(obj)); } else { id = ATOM_TO_JSID(atom); @@ -385,7 +315,7 @@ NameOp(VMFrame &f, JSObject *obj, bool callname) if (!obj->getGeneric(cx, id, &rval)) return NULL; } else { - shape = (Shape *)prop; + Shape *shape = (Shape *)prop; JSObject *normalized = obj; if (normalized->isWith() && !shape->hasDefaultGetter()) normalized = js_UnwrapWithObject(cx, normalized); @@ -761,8 +691,8 @@ stubs::DefFun(VMFrame &f, JSFunction *fun) * windows, and user-defined JS functions precompiled and then shared among * requests in server-side JS. */ - if (obj->getParent() != obj2) { - obj = CloneFunctionObject(cx, fun, obj2, true); + if (obj->toFunction()->environment() != obj2) { + obj = CloneFunctionObjectIfNotSingleton(cx, fun, obj2); if (!obj) THROW(); JS_ASSERT_IF(f.script()->compileAndGo, obj->getGlobal() == fun->getGlobal()); @@ -948,7 +878,6 @@ StubEqualityOp(VMFrame &f) cond = JSDOUBLE_COMPARE(l, !=, r, IFNAN); } else if (lval.isObject()) { JSObject *l = &lval.toObject(), *r = &rval.toObject(); - l->assertSpecialEqualitySynced(); if (JSEqualityOp eq = l->getClass()->ext.equality) { if (!eq(cx, l, &rval, &cond)) return false; @@ -1325,7 +1254,7 @@ stubs::NewInitObject(VMFrame &f, JSObject *baseobj) TypeObject *type = (TypeObject *) f.scratch; if (!baseobj) { - gc::AllocKind kind = GuessObjectGCKind(0, false); + gc::AllocKind kind = GuessObjectGCKind(0); JSObject *obj = NewBuiltinClassInstance(cx, &ObjectClass, kind); if (!obj) THROW(); @@ -1406,7 +1335,7 @@ stubs::DefLocalFun(VMFrame &f, JSFunction *fun) JSObject *obj = fun; if (fun->isNullClosure()) { - obj = CloneFunctionObject(f.cx, fun, &f.fp()->scopeChain(), true); + obj = CloneFunctionObjectIfNotSingleton(f.cx, fun, &f.fp()->scopeChain()); if (!obj) THROWV(NULL); } else { @@ -1415,8 +1344,8 @@ stubs::DefLocalFun(VMFrame &f, JSFunction *fun) if (!parent) THROWV(NULL); - if (obj->getParent() != parent) { - obj = CloneFunctionObject(f.cx, fun, parent, true); + if (obj->toFunction()->environment() != parent) { + obj = CloneFunctionObjectIfNotSingleton(f.cx, fun, parent); if (!obj) THROWV(NULL); } @@ -1456,9 +1385,9 @@ stubs::RegExp(VMFrame &f, JSObject *regex) JSObject * JS_FASTCALL stubs::LambdaJoinableForInit(VMFrame &f, JSFunction *fun) { - jsbytecode *nextpc = (jsbytecode *) f.scratch; + DebugOnly nextpc = (jsbytecode *) f.scratch; JS_ASSERT(fun->joinable()); - fun->setMethodAtom(f.fp()->script()->getAtom(GET_SLOTNO(nextpc))); + JS_ASSERT(fun->methodAtom() == f.script()->getAtom(GET_SLOTNO(nextpc))); return fun; } @@ -1466,10 +1395,10 @@ JSObject * JS_FASTCALL 
stubs::LambdaJoinableForSet(VMFrame &f, JSFunction *fun) { JS_ASSERT(fun->joinable()); - jsbytecode *nextpc = (jsbytecode *) f.scratch; + DebugOnly nextpc = (jsbytecode *) f.scratch; const Value &lref = f.regs.sp[-1]; if (lref.isObject() && lref.toObject().canHaveMethodBarrier()) { - fun->setMethodAtom(f.fp()->script()->getAtom(GET_SLOTNO(nextpc))); + JS_ASSERT(fun->methodAtom() == f.script()->getAtom(GET_SLOTNO(nextpc))); return fun; } return Lambda(f, fun); @@ -1495,11 +1424,10 @@ stubs::LambdaJoinableForCall(VMFrame &f, JSFunction *fun) * for this JSOP_CALL. */ const Value &cref = f.regs.sp[1 - (iargc + 2)]; - JSObject *callee; + JSFunction *callee; if (IsFunctionObject(cref, &callee)) { - JSFunction *calleeFun = callee->getFunctionPrivate(); - Native native = calleeFun->maybeNative(); + Native native = callee->maybeNative(); if (native) { if (iargc == 1 && native == array_sort) @@ -1530,7 +1458,7 @@ stubs::Lambda(VMFrame &f, JSFunction *fun) THROWV(NULL); } - JSObject *obj = CloneFunctionObject(f.cx, fun, parent, true); + JSObject *obj = CloneFunctionObjectIfNotSingleton(f.cx, fun, parent); if (!obj) THROWV(NULL); @@ -1570,16 +1498,7 @@ InlineGetProp(VMFrame &f) JSAtom *atom; JS_PROPERTY_CACHE(cx).test(cx, f.pc(), aobj, obj2, entry, atom); if (!atom) { - if (entry->vword.isFunObj()) { - rval.setObject(entry->vword.toFunObj()); - } else if (entry->vword.isSlot()) { - uint32 slot = entry->vword.toSlot(); - rval = obj2->nativeGetSlot(slot); - } else { - JS_ASSERT(entry->vword.isShape()); - const Shape *shape = entry->vword.toShape(); - NATIVE_GET(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, &rval, return false); - } + NATIVE_GET(cx, obj, obj2, entry->prop, JSGET_METHOD_BARRIER, &rval, return false); break; } @@ -1659,17 +1578,8 @@ stubs::CallProp(VMFrame &f, JSAtom *origAtom) JSAtom *atom; JS_PROPERTY_CACHE(cx).test(cx, f.pc(), aobj, obj2, entry, atom); if (!atom) { - if (entry->vword.isFunObj()) { - rval.setObject(entry->vword.toFunObj()); - } else if (entry->vword.isSlot()) { - uint32 slot = entry->vword.toSlot(); - rval = obj2->nativeGetSlot(slot); - } else { - JS_ASSERT(entry->vword.isShape()); - const Shape *shape = entry->vword.toShape(); - NATIVE_GET(cx, &objv.toObject(), obj2, shape, JSGET_NO_METHOD_BARRIER, &rval, - THROW()); - } + NATIVE_GET(cx, &objv.toObject(), obj2, entry->prop, JSGET_NO_METHOD_BARRIER, &rval, + THROW()); regs.sp++; regs.sp[-2] = rval; regs.sp[-1] = lval; @@ -1725,7 +1635,6 @@ static void InitPropOrMethod(VMFrame &f, JSAtom *atom, JSOp op) { JSContext *cx = f.cx; - JSRuntime *rt = cx->runtime; FrameRegs ®s = f.regs; /* Load the property's initial value into rval. */ @@ -1749,35 +1658,14 @@ InitPropOrMethod(VMFrame &f, JSAtom *atom, JSOp op) * repeated property name. The fast path does not handle these two cases. */ PropertyCacheEntry *entry; - const Shape *shape; - if (JS_PROPERTY_CACHE(cx).testForInit(rt, f.pc(), obj, &shape, &entry) && - shape->hasDefaultSetter() && - shape->previous() == obj->lastProperty()) - { + JSObject *obj2; + JSAtom *atom2; + if (JS_PROPERTY_CACHE(cx).testForSet(cx, f.pc(), obj, &entry, &obj2, &atom2) && + entry->prop->hasDefaultSetter() && + entry->vindex == 0) { + JS_ASSERT(obj == obj2); /* Fast path. Property cache hit. 
*/ - uint32 slot = shape->slot; - - JS_ASSERT(slot == obj->slotSpan()); - JS_ASSERT(slot >= JSSLOT_FREE(obj->getClass())); - if (slot < obj->numSlots()) { - JS_ASSERT(obj->getSlot(slot).isUndefined()); - } else { - if (!obj->allocSlot(cx, &slot)) - THROW(); - JS_ASSERT(slot == shape->slot); - } - - /* A new object, or one we just extended in a recent initprop op. */ - JS_ASSERT(!obj->lastProperty() || - obj->shape() == obj->lastProperty()->shapeid); - obj->extend(cx, shape); - - /* - * No method change check here because here we are adding a new - * property, not updating an existing slot's value that might - * contain a method of a branded shape. - */ - obj->nativeSetSlotWithType(cx, shape, rval); + obj->nativeSetSlotWithType(cx, entry->prop, rval); } else { PCMETER(JS_PROPERTY_CACHE(cx).inipcmisses++); @@ -1970,13 +1858,13 @@ stubs::EnterBlock(VMFrame &f, JSObject *obj) */ JSObject *obj2 = &fp->scopeChain(); while (obj2->isWith()) - obj2 = obj2->getParent(); + obj2 = obj2->internalScopeChain(); if (obj2->isBlock() && obj2->getPrivate() == js_FloatingFrameIfGenerator(cx, fp)) { JSObject *youngestProto = obj2->getProto(); JS_ASSERT(youngestProto->isStaticBlock()); JSObject *parent = obj; - while ((parent = parent->getParent()) != youngestProto) + while ((parent = parent->scopeChain()) != youngestProto) JS_ASSERT(parent); } #endif @@ -2136,30 +2024,6 @@ finally: return native; } -void JS_FASTCALL -stubs::Unbrand(VMFrame &f) -{ - const Value &thisv = f.regs.sp[-1]; - if (!thisv.isObject()) - return; - JSObject *obj = &thisv.toObject(); - if (obj->isNative()) - obj->unbrand(f.cx); -} - -void JS_FASTCALL -stubs::UnbrandThis(VMFrame &f) -{ - if (!ComputeThis(f.cx, f.fp())) - THROW(); - Value &thisv = f.fp()->thisValue(); - if (!thisv.isObject()) - return; - JSObject *obj = &thisv.toObject(); - if (obj->isNative()) - obj->unbrand(f.cx); -} - void JS_FASTCALL stubs::Pos(VMFrame &f) { @@ -2487,9 +2351,15 @@ stubs::FunctionFrameEpilogue(VMFrame &f) void JS_FASTCALL stubs::AnyFrameEpilogue(VMFrame &f) { + /* + * On the normal execution path, emitReturn inlines ScriptEpilogue. + * This function implements forced early returns, so it must have the + * same effect. + */ + if (!ScriptEpilogue(f.cx, f.fp(), true)) + THROW(); if (f.fp()->isNonEvalFunctionFrame()) f.fp()->functionEpilogue(); - stubs::ScriptDebugEpilogue(f); } template diff --git a/js/src/methodjit/StubCalls.h b/js/src/methodjit/StubCalls.h index a75bb01f6243..17ec5bea2887 100644 --- a/js/src/methodjit/StubCalls.h +++ b/js/src/methodjit/StubCalls.h @@ -90,13 +90,11 @@ void JS_FASTCALL ScriptProbeOnlyEpilogue(VMFrame &f); * to JM native code. Then all fields are non-NULL. */ struct UncachedCallResult { - JSObject *callee; // callee object JSFunction *fun; // callee function void *codeAddr; // code address of compiled callee function bool unjittable; // did we try to JIT and fail? 
void init() { - callee = NULL; fun = NULL; codeAddr = NULL; unjittable = false; @@ -192,8 +190,6 @@ JSBool JS_FASTCALL ValueToBoolean(VMFrame &f); JSString * JS_FASTCALL TypeOf(VMFrame &f); JSBool JS_FASTCALL InstanceOf(VMFrame &f); void JS_FASTCALL FastInstanceOf(VMFrame &f); -void JS_FASTCALL Unbrand(VMFrame &f); -void JS_FASTCALL UnbrandThis(VMFrame &f); /* * Helper for triggering recompilation should a name read miss a type barrier, diff --git a/js/src/perf/jsperf.cpp b/js/src/perf/jsperf.cpp index 2d573bde3407..44348724f562 100644 --- a/js/src/perf/jsperf.cpp +++ b/js/src/perf/jsperf.cpp @@ -40,6 +40,8 @@ #include "jscntxt.h" /* for error messages */ #include "jsobj.h" /* for unwrapping without a context */ +#include "jsobjinlines.h" + using JS::PerfMeasurement; // You cannot forward-declare a static object in C++, so instead diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp index 61978e087a59..b3ee8365d29a 100644 --- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -1921,7 +1921,7 @@ SrcNotes(JSContext *cx, JSScript *script, Sprinter *sp) case SRC_FUNCDEF: { uint32 index = js_GetSrcNoteOffset(sn, 0); JSObject *obj = script->getObject(index); - JSFunction *fun = (JSFunction *) JS_GetPrivate(cx, obj); + JSFunction *fun = obj->toFunction(); JSString *str = JS_DecompileFunction(cx, fun, JS_DONT_PRETTY_PRINT); JSAutoByteString bytes; if (!str || !bytes.encode(cx, str)) @@ -2063,7 +2063,7 @@ DisassembleScript(JSContext *cx, JSScript *script, JSFunction *fun, bool lines, JSObject *obj = objects->vector[i]; if (obj->isFunction()) { Sprint(sp, "\n"); - JSFunction *fun = obj->getFunctionPrivate(); + JSFunction *fun = obj->toFunction(); JSScript *nested = fun->maybeScript(); if (!DisassembleScript(cx, nested, fun, lines, recursive, sp)) return false; @@ -2513,7 +2513,7 @@ DumpStack(JSContext *cx, uintN argc, Value *vp) return false; StackIter iter(cx); - JS_ASSERT(iter.nativeArgs().callee().getFunctionPrivate()->native() == DumpStack); + JS_ASSERT(iter.nativeArgs().callee().toFunction()->native() == DumpStack); ++iter; uint32 index = 0; @@ -2742,7 +2742,7 @@ Clone(JSContext *cx, uintN argc, jsval *vp) } } if (funobj->compartment() != cx->compartment) { - JSFunction *fun = funobj->getFunctionPrivate(); + JSFunction *fun = funobj->toFunction(); if (fun->isInterpreted() && fun->script()->compileAndGo) { JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_UNEXPECTED_TYPE, "function", "compile-and-go"); @@ -3109,11 +3109,7 @@ ShapeOf(JSContext *cx, uintN argc, jsval *vp) *vp = JSVAL_ZERO; return JS_TRUE; } - if (!obj->isNative()) { - *vp = INT_TO_JSVAL(-1); - return JS_TRUE; - } - return JS_NewNumberValue(cx, obj->shape(), vp); + return JS_NewNumberValue(cx, (double) ((jsuword)obj->lastProperty() >> 3), vp); } /* @@ -3143,7 +3139,7 @@ CopyProperty(JSContext *cx, JSObject *obj, JSObject *referent, jsid id, if (!shape) return false; } else if (shape->hasSlot()) { - desc.value = referent->nativeGetSlot(shape->slot); + desc.value = referent->nativeGetSlot(shape->slot()); } else { desc.value.setUndefined(); } @@ -3155,7 +3151,7 @@ CopyProperty(JSContext *cx, JSObject *obj, JSObject *referent, jsid id, desc.setter = shape->setter(); if (!desc.setter && !(desc.attrs & JSPROP_SETTER)) desc.setter = JS_StrictPropertyStub; - desc.shortid = shape->shortid; + desc.shortid = shape->shortid(); propFlags = shape->getFlags(); } else if (IsProxy(referent)) { PropertyDescriptor desc; diff --git a/js/src/shell/jsheaptools.cpp b/js/src/shell/jsheaptools.cpp index e596171a4c02..c2714be4228d 100644 --- 
a/js/src/shell/jsheaptools.cpp +++ b/js/src/shell/jsheaptools.cpp @@ -50,6 +50,8 @@ #include "jsprf.h" #include "jsutil.h" +#include "jsobjinlines.h" + using namespace js; #ifdef DEBUG diff --git a/js/src/shell/jsworkers.cpp b/js/src/shell/jsworkers.cpp index 49d552f1bf4b..0732fb0046f3 100644 --- a/js/src/shell/jsworkers.cpp +++ b/js/src/shell/jsworkers.cpp @@ -663,20 +663,25 @@ class Worker MOZ_FINAL : public WorkerParent // alive, this postMessage function cannot be called after the Worker // is collected. Therefore it's safe to stash a pointer (a weak // reference) to the C++ Worker object in the reserved slot. - post = JS_GetFunctionObject(JS_DefineFunction(context, global, "postMessage", - (JSNative) jsPostMessageToParent, 1, 0)); - if (!post || !JS_SetReservedSlot(context, post, 0, PRIVATE_TO_JSVAL(this))) + post = JS_GetFunctionObject( + js::DefineFunctionWithReserved(context, global, "postMessage", + (JSNative) jsPostMessageToParent, 1, 0)); + if (!post) goto bad; - proto = JS_InitClass(context, global, NULL, &jsWorkerClass, jsConstruct, 1, - NULL, jsMethods, NULL, NULL); + js::SetFunctionNativeReserved(post, 0, PRIVATE_TO_JSVAL(this)); + + proto = js::InitClassWithReserved(context, global, NULL, &jsWorkerClass, jsConstruct, 1, + NULL, jsMethods, NULL, NULL); if (!proto) goto bad; ctor = JS_GetConstructor(context, proto); - if (!ctor || !JS_SetReservedSlot(context, ctor, 0, PRIVATE_TO_JSVAL(this))) + if (!ctor) goto bad; + js::SetFunctionNativeReserved(post, 0, PRIVATE_TO_JSVAL(this)); + JS_EndRequest(context); JS_ClearContextThread(context); return true; @@ -833,23 +838,17 @@ class Worker MOZ_FINAL : public WorkerParent } static bool getWorkerParentFromConstructor(JSContext *cx, JSObject *ctor, WorkerParent **p) { - jsval v; - if (!JS_GetReservedSlot(cx, ctor, 0, &v)) - return false; + jsval v = js::GetFunctionNativeReserved(ctor, 0); if (JSVAL_IS_VOID(v)) { // This means ctor is the root Worker constructor (created in // Worker::initWorkers as opposed to Worker::createContext, which sets up // Worker sandboxes) and nothing is initialized yet. - if (!JS_GetReservedSlot(cx, ctor, 1, &v)) - return false; + v = js::GetFunctionNativeReserved(ctor, 1); ThreadPool *threadPool = (ThreadPool *) JSVAL_TO_PRIVATE(v); if (!threadPool->start(cx)) return false; WorkerParent *parent = threadPool->getMainQueue(); - if (!JS_SetReservedSlot(cx, ctor, 0, PRIVATE_TO_JSVAL(parent))) { - threadPool->shutdown(cx); - return false; - } + js::SetFunctionNativeReserved(ctor, 0, PRIVATE_TO_JSVAL(parent)); *p = parent; return true; } @@ -888,17 +887,16 @@ class Worker MOZ_FINAL : public WorkerParent *objp = threadPool->asObject(); // Create the Worker constructor. - JSObject *proto = JS_InitClass(cx, global, NULL, &jsWorkerClass, - jsConstruct, 1, - NULL, jsMethods, NULL, NULL); + JSObject *proto = js::InitClassWithReserved(cx, global, NULL, &jsWorkerClass, + jsConstruct, 1, + NULL, jsMethods, NULL, NULL); if (!proto) return NULL; // Stash a pointer to the ThreadPool in constructor reserved slot 1. // It will be used later when lazily creating the MainQueue. 
JSObject *ctor = JS_GetConstructor(cx, proto); - if (!JS_SetReservedSlot(cx, ctor, 1, PRIVATE_TO_JSVAL(threadPool))) - return NULL; + js::SetFunctionNativeReserved(ctor, 1, PRIVATE_TO_JSVAL(threadPool)); return threadPool; } @@ -1192,9 +1190,7 @@ Worker::processOneEvent() JSBool Worker::jsPostMessageToParent(JSContext *cx, uintN argc, jsval *vp) { - jsval workerval; - if (!JS_GetReservedSlot(cx, JSVAL_TO_OBJECT(JS_CALLEE(cx, vp)), 0, &workerval)) - return false; + jsval workerval = js::GetFunctionNativeReserved(JSVAL_TO_OBJECT(JS_CALLEE(cx, vp)), 0); Worker *w = (Worker *) JSVAL_TO_PRIVATE(workerval); { diff --git a/js/src/tests/js1_8_5/regress/jstests.list b/js/src/tests/js1_8_5/regress/jstests.list index 321a6dba8f96..ce7af298614c 100644 --- a/js/src/tests/js1_8_5/regress/jstests.list +++ b/js/src/tests/js1_8_5/regress/jstests.list @@ -1,6 +1,9 @@ url-prefix ../../jsreftest.html?test=js1_8_5/regress/ script no-array-comprehension-length-limit.js +script regress-373843.js script regress-383902.js +script regress-469758.js +require-or(debugMode,skip) script regress-476088.js script regress-500528.js script regress-533876.js script regress-541255-0.js diff --git a/js/src/tests/js1_8_5/regress/regress-373843.js b/js/src/tests/js1_8_5/regress/regress-373843.js new file mode 100644 index 000000000000..677e33353422 --- /dev/null +++ b/js/src/tests/js1_8_5/regress/regress-373843.js @@ -0,0 +1,13 @@ +// Any copyright is dedicated to the Public Domain. +// http://creativecommons.org/licenses/publicdomain/ + +if (typeof disassemble != 'undefined') +{ + var func = disassemble(function() { return "c\\d"; }) + + // The disassembled function will contain a bytecode "string" with the content of the string next to it. + // Check if that string isn't over-escaped i.e. \\ isn't escaped to \\\\ . + assertEq(func.indexOf("\\\\\\\\"), -1) +} + +reportCompare(0, 0, 'ok'); diff --git a/js/src/tests/js1_8_5/regress/regress-469758.js b/js/src/tests/js1_8_5/regress/regress-469758.js new file mode 100644 index 000000000000..27fa1ed8a751 --- /dev/null +++ b/js/src/tests/js1_8_5/regress/regress-469758.js @@ -0,0 +1,14 @@ +// Any copyright is dedicated to the Public Domain. +// http://creativecommons.org/licenses/publicdomain/ + +var err; +try { + {let i=1} + {let j=1; [][j][2]} +} catch (e) { + err = e; +} +assertEq(err instanceof TypeError, true); +assertEq(err.message, "[][j] is undefined"); + +reportCompare(0, 0, 'ok'); diff --git a/js/src/tests/js1_8_5/regress/regress-476088.js b/js/src/tests/js1_8_5/regress/regress-476088.js new file mode 100644 index 000000000000..b01f0c85f5f6 --- /dev/null +++ b/js/src/tests/js1_8_5/regress/regress-476088.js @@ -0,0 +1,17 @@ +// Any copyright is dedicated to the Public Domain. 
+// http://creativecommons.org/licenses/publicdomain/ + + +var err; +try { + f = function() { var x; x.y; } + trap(f, 0, ""); + f(); +} catch (e) { + err = e; +} +assertEq(err instanceof TypeError, true); +assertEq(err.message, "x is undefined") + +reportCompare(0, 0, 'ok'); + diff --git a/js/src/vm/ArgumentsObject-inl.h b/js/src/vm/ArgumentsObject-inl.h index c50967d7f583..448ec967c78d 100644 --- a/js/src/vm/ArgumentsObject-inl.h +++ b/js/src/vm/ArgumentsObject-inl.h @@ -48,16 +48,16 @@ namespace js { inline void ArgumentsObject::initInitialLength(uint32 length) { - JS_ASSERT(getSlot(INITIAL_LENGTH_SLOT).isUndefined()); - initSlot(INITIAL_LENGTH_SLOT, Int32Value(length << PACKED_BITS_COUNT)); - JS_ASSERT((getSlot(INITIAL_LENGTH_SLOT).toInt32() >> PACKED_BITS_COUNT) == int32(length)); + JS_ASSERT(getFixedSlot(INITIAL_LENGTH_SLOT).isUndefined()); + initFixedSlot(INITIAL_LENGTH_SLOT, Int32Value(length << PACKED_BITS_COUNT)); + JS_ASSERT((getFixedSlot(INITIAL_LENGTH_SLOT).toInt32() >> PACKED_BITS_COUNT) == int32(length)); JS_ASSERT(!hasOverriddenLength()); } inline uint32 ArgumentsObject::initialLength() const { - uint32 argc = uint32(getSlot(INITIAL_LENGTH_SLOT).toInt32()) >> PACKED_BITS_COUNT; + uint32 argc = uint32(getFixedSlot(INITIAL_LENGTH_SLOT).toInt32()) >> PACKED_BITS_COUNT; JS_ASSERT(argc <= StackSpace::ARGS_LENGTH_MAX); return argc; } @@ -65,28 +65,28 @@ ArgumentsObject::initialLength() const inline void ArgumentsObject::markLengthOverridden() { - uint32 v = getSlot(INITIAL_LENGTH_SLOT).toInt32() | LENGTH_OVERRIDDEN_BIT; - setSlot(INITIAL_LENGTH_SLOT, Int32Value(v)); + uint32 v = getFixedSlot(INITIAL_LENGTH_SLOT).toInt32() | LENGTH_OVERRIDDEN_BIT; + setFixedSlot(INITIAL_LENGTH_SLOT, Int32Value(v)); } inline bool ArgumentsObject::hasOverriddenLength() const { - const js::Value &v = getSlot(INITIAL_LENGTH_SLOT); + const js::Value &v = getFixedSlot(INITIAL_LENGTH_SLOT); return v.toInt32() & LENGTH_OVERRIDDEN_BIT; } inline void ArgumentsObject::initData(ArgumentsData *data) { - JS_ASSERT(getSlot(DATA_SLOT).isUndefined()); - initSlot(DATA_SLOT, PrivateValue(data)); + JS_ASSERT(getFixedSlot(DATA_SLOT).isUndefined()); + initFixedSlot(DATA_SLOT, PrivateValue(data)); } inline ArgumentsData * ArgumentsObject::data() const { - return reinterpret_cast(getSlot(DATA_SLOT).toPrivate()); + return reinterpret_cast(getFixedSlot(DATA_SLOT).toPrivate()); } inline const js::Value & @@ -112,13 +112,13 @@ ArgumentsObject::setElement(uint32 i, const js::Value &v) inline js::StackFrame * ArgumentsObject::maybeStackFrame() const { - return reinterpret_cast(getPrivate()); + return reinterpret_cast(getFixedSlot(STACK_FRAME_SLOT).toPrivate()); } inline void ArgumentsObject::setStackFrame(StackFrame *frame) { - return setPrivate(frame); + setFixedSlot(STACK_FRAME_SLOT, PrivateValue(frame)); } inline const js::Value & diff --git a/js/src/vm/ArgumentsObject.h b/js/src/vm/ArgumentsObject.h index ee78feb1bae2..b3e91f048e68 100644 --- a/js/src/vm/ArgumentsObject.h +++ b/js/src/vm/ArgumentsObject.h @@ -131,14 +131,25 @@ struct ArgumentsData * the ArgumentsData. If you're simply looking to get arguments[i], * however, use getElement or getElements to avoid spreading arguments * object implementation details around too much. + * STACK_FRAME_SLOT + * Stores the function's stack frame for non-strict arguments objects until + * the function returns, when it is replaced with null. 
When an arguments + * object is created on-trace its private is JS_ARGUMENTS_OBJECT_ON_TRACE, + * and when the trace exits its private is replaced with the stack frame or + * null, as appropriate. This slot is used by strict arguments objects as + * well, but the slot is always null. Conceptually it would be better to + * remove this oddity, but preserving it allows us to work with arguments + * objects of either kind more abstractly, so we keep it for now. */ class ArgumentsObject : public ::JSObject { static const uint32 INITIAL_LENGTH_SLOT = 0; static const uint32 DATA_SLOT = 1; + static const uint32 STACK_FRAME_SLOT = 2; public: - static const uint32 RESERVED_SLOTS = 2; + static const uint32 RESERVED_SLOTS = 3; + static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4; private: /* Lower-order bit stolen from the length slot. */ @@ -159,8 +170,9 @@ class ArgumentsObject : public ::JSObject void initData(ArgumentsData *data); public: - /* Create an arguments object for the given callee function. */ - static ArgumentsObject *create(JSContext *cx, uint32 argc, JSObject &callee); + /* Create an arguments object for the given callee function and frame. */ + static ArgumentsObject *create(JSContext *cx, uint32 argc, JSObject &callee, + StackFrame *fp); /* * Return the initial length of the arguments. This may differ from the @@ -204,16 +216,12 @@ class ArgumentsObject : public ::JSObject inline void setStackFrame(js::StackFrame *frame); }; -/* - * Non-strict arguments have a private: the function's stack frame until the - * function returns, when it is replaced with null. - */ class NormalArgumentsObject : public ArgumentsObject { friend bool JSObject::isNormalArguments() const; friend struct EmptyShape; // for EmptyShape::getEmptyArgumentsShape friend ArgumentsObject * - ArgumentsObject::create(JSContext *cx, uint32 argc, JSObject &callee); + ArgumentsObject::create(JSContext *cx, uint32 argc, JSObject &callee, StackFrame *fp); public: /* @@ -226,17 +234,11 @@ class NormalArgumentsObject : public ArgumentsObject inline void clearCallee(); }; -/* - * Technically strict arguments have a private, but it's always null. - * Conceptually it would be better to remove this oddity, but preserving it - * allows us to work with arguments objects of either kind more abstractly, - * so we keep it for now. 
- */ class StrictArgumentsObject : public ArgumentsObject { friend bool JSObject::isStrictArguments() const; friend ArgumentsObject * - ArgumentsObject::create(JSContext *cx, uint32 argc, JSObject &callee); + ArgumentsObject::create(JSContext *cx, uint32 argc, JSObject &callee, StackFrame *fp); }; } // namespace js diff --git a/js/src/vm/BooleanObject-inl.h b/js/src/vm/BooleanObject-inl.h index 1472394b3cce..a6bbadb49289 100644 --- a/js/src/vm/BooleanObject-inl.h +++ b/js/src/vm/BooleanObject-inl.h @@ -68,7 +68,7 @@ BooleanObject::create(JSContext *cx, bool b) inline BooleanObject * BooleanObject::createWithProto(JSContext *cx, bool b, JSObject &proto) { - JSObject *obj = NewObjectWithClassProto(cx, &BooleanClass, &proto, + JSObject *obj = NewObjectWithClassProto(cx, &BooleanClass, &proto, NULL, gc::GetGCObjectKind(RESERVED_SLOTS)); if (!obj) return NULL; diff --git a/js/src/vm/CallObject-inl.h b/js/src/vm/CallObject-inl.h index 5cfd7371c21f..71adce2a963c 100644 --- a/js/src/vm/CallObject-inl.h +++ b/js/src/vm/CallObject-inl.h @@ -89,7 +89,7 @@ CallObject::getCallee() const inline JSFunction * CallObject::getCalleeFunction() const { - return getFixedSlot(CALLEE_SLOT).toObject().getFunctionPrivate(); + return getFixedSlot(CALLEE_SLOT).toObject().toFunction(); } inline const js::Value & @@ -164,7 +164,7 @@ CallObject::initVarUnchecked(uintN i, const js::Value &v) inline void CallObject::copyValues(uintN nargs, Value *argv, uintN nvars, Value *slots) { - JS_ASSERT(numSlots() >= RESERVED_SLOTS + nargs + nvars); + JS_ASSERT(slotInRange(RESERVED_SLOTS + nargs + nvars, SENTINEL_ALLOWED)); copySlotRange(RESERVED_SLOTS, argv, nargs, true); copySlotRange(RESERVED_SLOTS + nargs, slots, nvars, true); } diff --git a/js/src/vm/CallObject.cpp b/js/src/vm/CallObject.cpp index 21b8ea5fd677..778159a55ad6 100644 --- a/js/src/vm/CallObject.cpp +++ b/js/src/vm/CallObject.cpp @@ -55,43 +55,55 @@ CallObject * CallObject::create(JSContext *cx, JSScript *script, JSObject &scopeChain, JSObject *callee) { Bindings &bindings = script->bindings; - size_t argsVars = bindings.countArgsAndVars(); - size_t slots = RESERVED_SLOTS + argsVars; - gc::AllocKind kind = gc::GetGCObjectKind(slots); + gc::AllocKind kind = gc::GetGCObjectKind(bindings.lastShape()->numFixedSlots() + 1); - /* - * Make sure that the arguments and variables in the call object all end up - * in a contiguous range of slots. We need this to be able to embed the - * args/vars arrays in the TypeScriptNesting for the function, after the - * call object's frame has finished. - */ - if (cx->typeInferenceEnabled() && gc::GetGCKindSlots(kind) < slots) { - kind = gc::GetGCObjectKind(RESERVED_SLOTS); - JS_ASSERT(gc::GetGCKindSlots(kind) == RESERVED_SLOTS); - } + js::types::TypeObject *type = cx->compartment->getEmptyType(cx); + if (!type) + return NULL; - JSObject *obj = js_NewGCObject(cx, kind); + HeapValue *slots; + if (!PreallocateObjectDynamicSlots(cx, bindings.lastShape(), &slots)) + return NULL; + + JSObject *obj = JSObject::create(cx, kind, bindings.lastShape(), type, slots); if (!obj) return NULL; - /* Init immediately to avoid GC seeing a half-init'ed object. */ - obj->initCall(cx, bindings, &scopeChain); - obj->makeVarObj(); - - /* This must come after callobj->lastProp has been set. */ - if (!obj->ensureInstanceReservedSlots(cx, argsVars)) - return NULL; + /* + * Update the parent for bindings associated with non-compileAndGo scripts, + * whose call objects do not have a consistent global variable and need + * to be updated dynamically. 
+ */ + JSObject *global = scopeChain.getGlobal(); + if (global != obj->getParent()) { + JS_ASSERT(obj->getParent() == NULL); + if (!obj->setParent(cx, global)) + return NULL; + } #ifdef DEBUG for (Shape::Range r = obj->lastProperty(); !r.empty(); r.popFront()) { const Shape &s = r.front(); - if (s.slot != SHAPE_INVALID_SLOT) { - JS_ASSERT(s.slot + 1 == obj->slotSpan()); + if (s.hasSlot()) { + JS_ASSERT(s.slot() + 1 == obj->slotSpan()); break; } } #endif + JS_ASSERT(obj->isCall()); + JS_ASSERT(!obj->inDictionaryMode()); + + if (!obj->setInternalScopeChain(cx, &scopeChain)) + return NULL; + + /* + * If |bindings| is for a function that has extensible parents, that means + * its Call should have its own shape; see js::BaseShape::extensibleParents. + */ + if (obj->lastProperty()->extensibleParents() && !obj->generateOwnShape(cx)) + return NULL; + CallObject &callobj = obj->asCall(); callobj.initCallee(callee); return &callobj; diff --git a/js/src/vm/CallObject.h b/js/src/vm/CallObject.h index ff745d8e153f..73d6b4320267 100644 --- a/js/src/vm/CallObject.h +++ b/js/src/vm/CallObject.h @@ -48,24 +48,31 @@ class CallObject : public ::JSObject /* * Reserved slot structure for Call objects: * - * private - the stack frame corresponding to the Call object - * until js_PutCallObject or its on-trace analog - * is called, null thereafter - * JSSLOT_CALL_CALLEE - callee function for the stack frame, or null if - * the stack frame is for strict mode eval code - * JSSLOT_CALL_ARGUMENTS - arguments object for non-strict mode eval stack - * frames (not valid for strict mode eval frames) + * SCOPE_CHAIN_SLOT - The enclosing scope. This must come first, for + * JSObject::scopeParent. + * CALLEE_SLOT - Callee function for the stack frame, or null if + * the stack frame is for strict mode eval code. + * ARGUMENTS_SLOT - Arguments object for non-strict mode eval stack + * frames (not valid for strict mode eval frames). + * private - The stack frame corresponding to the Call object + * until js_PutCallObject or its on-trace analog + * is called, null thereafter. + * + * DeclEnv objects use SCOPE_CHAIN_SLOT and private in the same fashion. */ - static const uintN CALLEE_SLOT = 0; - static const uintN ARGUMENTS_SLOT = 1; + static const uint32 CALLEE_SLOT = 1; + static const uint32 ARGUMENTS_SLOT = 2; public: - static const uintN RESERVED_SLOTS = 2; - /* Create a CallObject for the given callee function. */ static CallObject * create(JSContext *cx, JSScript *script, JSObject &scopeChain, JSObject *callee); + static const uint32 RESERVED_SLOTS = 3; + + static const uint32 DECL_ENV_RESERVED_SLOTS = 1; + static const gc::AllocKind DECL_ENV_FINALIZE_KIND = gc::FINALIZE_OBJECT2; + /* True if this is for a strict mode eval frame or for a function call. */ inline bool isForEval() const; diff --git a/js/src/vm/Debugger.cpp b/js/src/vm/Debugger.cpp index d2928e13edae..5a8e06ee465e 100644 --- a/js/src/vm/Debugger.cpp +++ b/js/src/vm/Debugger.cpp @@ -366,8 +366,8 @@ Debugger::getScriptFrame(JSContext *cx, StackFrame *fp, Value *vp) /* Create and populate the Debugger.Frame object. 
*/ JSObject *proto = &object->getReservedSlot(JSSLOT_DEBUG_FRAME_PROTO).toObject(); JSObject *frameobj = - NewNonFunction(cx, &DebuggerFrame_class, proto, NULL); - if (!frameobj || !frameobj->ensureClassReservedSlots(cx)) + NewObjectWithGivenProto(cx, &DebuggerFrame_class, proto, NULL); + if (!frameobj) return false; frameobj->setPrivate(fp); frameobj->setReservedSlot(JSSLOT_DEBUGFRAME_OWNER, ObjectValue(*object)); @@ -495,8 +495,8 @@ Debugger::wrapDebuggeeValue(JSContext *cx, Value *vp) /* Create a new Debugger.Object for obj. */ JSObject *proto = &object->getReservedSlot(JSSLOT_DEBUG_OBJECT_PROTO).toObject(); JSObject *dobj = - NewNonFunction(cx, &DebuggerObject_class, proto, NULL); - if (!dobj || !dobj->ensureClassReservedSlots(cx)) + NewObjectWithGivenProto(cx, &DebuggerObject_class, proto, NULL); + if (!dobj) return false; dobj->setPrivate(obj); dobj->setReservedSlot(JSSLOT_DEBUGOBJECT_OWNER, ObjectValue(*object)); @@ -627,7 +627,7 @@ Debugger::parseResumptionValue(AutoCompartment &ac, bool ok, const Value &rv, Va shape = obj->lastProperty(); okResumption = shape->previous() && !shape->previous()->previous() && - (shape->propid == returnId || shape->propid == throwId) && + (shape->propid() == returnId || shape->propid() == throwId) && shape->isDataDescriptor(); } if (!okResumption) { @@ -643,7 +643,7 @@ Debugger::parseResumptionValue(AutoCompartment &ac, bool ok, const Value &rv, Va vp->setUndefined(); return JSTRAP_ERROR; } - return shape->propid == returnId ? JSTRAP_RETURN : JSTRAP_THROW; + return shape->propid() == returnId ? JSTRAP_RETURN : JSTRAP_THROW; } bool @@ -1599,8 +1599,8 @@ Debugger::construct(JSContext *cx, uintN argc, Value *vp) * Debugger.{Frame,Object,Script}.prototype in reserved slots. The rest of * the reserved slots are for hooks; they default to undefined. */ - JSObject *obj = NewNonFunction(cx, &Debugger::jsclass, proto, NULL); - if (!obj || !obj->ensureClassReservedSlots(cx)) + JSObject *obj = NewObjectWithGivenProto(cx, &Debugger::jsclass, proto, NULL); + if (!obj) return false; for (uintN slot = JSSLOT_DEBUG_PROTO_START; slot < JSSLOT_DEBUG_PROTO_STOP; slot++) obj->setReservedSlot(slot, proto->getReservedSlot(slot)); @@ -1815,8 +1815,8 @@ Debugger::newDebuggerScript(JSContext *cx, JSScript *script) JSObject *proto = &object->getReservedSlot(JSSLOT_DEBUG_SCRIPT_PROTO).toObject(); JS_ASSERT(proto); - JSObject *scriptobj = NewNonFunction(cx, &DebuggerScript_class, proto, NULL); - if (!scriptobj || !scriptobj->ensureClassReservedSlots(cx)) + JSObject *scriptobj = NewObjectWithGivenProto(cx, &DebuggerScript_class, proto, NULL); + if (!scriptobj) return NULL; scriptobj->setReservedSlot(JSSLOT_DEBUGSCRIPT_OWNER, ObjectValue(*object)); scriptobj->setPrivate(script); @@ -2505,7 +2505,7 @@ static JSBool DebuggerArguments_getArg(JSContext *cx, uintN argc, Value *vp) { CallArgs args = CallArgsFromVp(argc, vp); - int32 i = args.callee().getReservedSlot(0).toInt32(); + int32 i = (int32) args.callee().toFunction()->getExtendedSlot(0).toInt32(); /* Check that the this value is an Arguments object. 
*/ if (!args.thisv().isObject()) { @@ -2561,7 +2561,7 @@ DebuggerFrame_getArguments(JSContext *cx, uintN argc, Value *vp) JSObject *proto; if (!js_GetClassPrototype(cx, global, JSProto_Array, &proto)) return false; - argsobj = NewNonFunction(cx, &DebuggerArguments_class, proto, global); + argsobj = NewObjectWithGivenProto(cx, &DebuggerArguments_class, proto, global); if (!argsobj || !js_SetReservedSlot(cx, argsobj, JSSLOT_DEBUGARGUMENTS_FRAME, ObjectValue(*thisobj))) { @@ -2578,16 +2578,17 @@ DebuggerFrame_getArguments(JSContext *cx, uintN argc, Value *vp) } for (int32 i = 0; i < fargc; i++) { - JSObject *getobj = - js_NewFunction(cx, NULL, DebuggerArguments_getArg, 0, 0, global, NULL); + JSFunction *getobj = + js_NewFunction(cx, NULL, DebuggerArguments_getArg, 0, 0, global, NULL, + JSFunction::ExtendedFinalizeKind); if (!getobj || - !js_SetReservedSlot(cx, getobj, 0, Int32Value(i)) || !DefineNativeProperty(cx, argsobj, INT_TO_JSID(i), UndefinedValue(), JS_DATA_TO_FUNC_PTR(PropertyOp, getobj), NULL, JSPROP_ENUMERATE | JSPROP_SHARED | JSPROP_GETTER, 0, 0)) { return false; } + getobj->setExtendedSlot(0, Int32Value(i)); } } else { argsobj = NULL; @@ -2605,7 +2606,7 @@ DebuggerFrame_getScript(JSContext *cx, uintN argc, Value *vp) JSObject *scriptObject = NULL; if (fp->isFunctionFrame() && !fp->isEvalFrame()) { - JSFunction *callee = fp->callee().getFunctionPrivate(); + JSFunction *callee = fp->callee().toFunction(); if (callee->isInterpreted()) { scriptObject = debug->wrapScript(cx, callee->script()); if (!scriptObject) @@ -2794,7 +2795,7 @@ DebuggerFrameEval(JSContext *cx, uintN argc, Value *vp, EvalBindingsMode mode) /* If evalWithBindings, create the inner scope object. */ if (mode == WithBindings) { /* TODO - Should probably create a With object here. 
*/ - scobj = NewNonFunction(cx, &ObjectClass, NULL, scobj); + scobj = NewObjectWithGivenProto(cx, &ObjectClass, NULL, scobj); if (!scobj) return false; for (size_t i = 0; i < keys.length(); i++) { @@ -2974,7 +2975,7 @@ DebuggerObject_getName(JSContext *cx, uintN argc, Value *vp) return true; } - JSString *name = obj->getFunctionPrivate()->atom; + JSString *name = obj->toFunction()->atom; if (!name) { args.rval().setUndefined(); return true; @@ -2996,7 +2997,7 @@ DebuggerObject_getParameterNames(JSContext *cx, uintN argc, Value *vp) return true; } - const JSFunction *fun = obj->getFunctionPrivate(); + const JSFunction *fun = obj->toFunction(); JSObject *result = NewDenseAllocatedArray(cx, fun->nargs, NULL); if (!result) return false; @@ -3034,7 +3035,7 @@ DebuggerObject_getScript(JSContext *cx, uintN argc, Value *vp) if (!obj->isFunction()) return true; - JSFunction *fun = obj->getFunctionPrivate(); + JSFunction *fun = obj->toFunction(); if (!fun->isInterpreted()) return true; @@ -3495,7 +3496,7 @@ JS_DefineDebuggerObject(JSContext *cx, JSObject *obj) JSObject *debugProto = js_InitClass(cx, obj, objProto, &Debugger::jsclass, Debugger::construct, 1, Debugger::properties, Debugger::methods, NULL, NULL, &debugCtor); - if (!debugProto || !debugProto->ensureClassReservedSlots(cx)) + if (!debugProto) return false; JSObject *frameProto = js_InitClass(cx, debugCtor, objProto, &DebuggerFrame_class, diff --git a/js/src/vm/Debugger.h b/js/src/vm/Debugger.h index bd99ee85f621..b49e0083f9e9 100644 --- a/js/src/vm/Debugger.h +++ b/js/src/vm/Debugger.h @@ -447,7 +447,7 @@ Debugger::toJSObject() const Debugger * Debugger::fromJSObject(JSObject *obj) { - JS_ASSERT(obj->getClass() == &jsclass); + JS_ASSERT(js::GetObjectClass(obj) == &jsclass); return (Debugger *) obj->getPrivate(); } diff --git a/js/src/vm/GlobalObject.cpp b/js/src/vm/GlobalObject.cpp index d2a7e8c3fd81..b60b04c8cb03 100644 --- a/js/src/vm/GlobalObject.cpp +++ b/js/src/vm/GlobalObject.cpp @@ -102,21 +102,23 @@ GlobalObject::initFunctionAndObjectClasses(JSContext *cx) * Create |Object.prototype| first, mirroring CreateBlankProto but for the * prototype of the created object. */ - JSObject *objectProto = NewNonFunction(cx, &ObjectClass, NULL, this); + JSObject *objectProto = NewObjectWithGivenProto(cx, &ObjectClass, NULL, this); if (!objectProto || !objectProto->setSingletonType(cx)) return NULL; - types::TypeObject *objectType = objectProto->getNewType(cx, NULL, /* markUnknown = */ true); - if (!objectType || !objectType->getEmptyShape(cx, &ObjectClass, gc::FINALIZE_OBJECT0)) + + /* + * The default 'new' type of Object.prototype is required by type inference + * to have unknown properties, to simplify handling of e.g. heterogenous + * objects in JSON and script literals. + */ + if (!objectProto->setNewTypeUnknown(cx)) return NULL; /* Create |Function.prototype| next so we can create other functions. 
*/ JSFunction *functionProto; { - JSObject *proto = NewObject(cx, &FunctionClass, objectProto, this); - if (!proto || !proto->setSingletonType(cx)) - return NULL; - types::TypeObject *functionType = proto->getNewType(cx, NULL, /* markUnknown = */ true); - if (!functionType || !functionType->getEmptyShape(cx, &FunctionClass, gc::FINALIZE_OBJECT0)) + JSObject *proto = NewObjectWithGivenProto(cx, &FunctionClass, objectProto, this); + if (!proto) return NULL; /* @@ -138,14 +140,25 @@ GlobalObject::initFunctionAndObjectClasses(JSContext *cx) script->code[1] = SRC_NULL; functionProto->initScript(script); functionProto->getType(cx)->interpretedFunction = functionProto; - script->hasFunction = true; + script->setFunction(functionProto); + + if (!proto->setSingletonType(cx)) + return NULL; + + /* + * The default 'new' type of Function.prototype is required by type + * inference to have unknown properties, to simplify handling of e.g. + * CloneFunctionObject. + */ + if (!proto->setNewTypeUnknown(cx)) + return NULL; } /* Create the Object function now that we have a [[Prototype]] for it. */ jsid objectId = ATOM_TO_JSID(CLASS_ATOM(cx, Object)); JSFunction *objectCtor; { - JSObject *ctor = NewObject(cx, &FunctionClass, functionProto, this); + JSObject *ctor = NewObjectWithGivenProto(cx, &FunctionClass, functionProto, this); if (!ctor) return NULL; objectCtor = js_NewFunction(cx, ctor, js_Object, 1, JSFUN_CONSTRUCTOR, this, @@ -168,7 +181,7 @@ GlobalObject::initFunctionAndObjectClasses(JSContext *cx) JSFunction *functionCtor; { JSObject *ctor = - NewObject(cx, &FunctionClass, functionProto, this); + NewObjectWithGivenProto(cx, &FunctionClass, functionProto, this); if (!ctor) return NULL; functionCtor = js_NewFunction(cx, ctor, Function, 1, JSFUN_CONSTRUCTOR, this, @@ -248,13 +261,13 @@ GlobalObject::create(JSContext *cx, Class *clasp) { JS_ASSERT(clasp->flags & JSCLASS_IS_GLOBAL); - JSObject *obj = NewNonFunction(cx, clasp, NULL, NULL); + JSObject *obj = NewObjectWithGivenProto(cx, clasp, NULL, NULL); if (!obj || !obj->setSingletonType(cx)) return NULL; GlobalObject *globalObj = obj->asGlobal(); - globalObj->makeVarObj(); - globalObj->syncSpecialEquality(); + if (!globalObj->setVarObj(cx)) + return NULL; /* Construct a regexp statics object for this global object. */ JSObject *res = RegExpStatics::create(cx, globalObj); @@ -269,9 +282,6 @@ GlobalObject::create(JSContext *cx, Class *clasp) bool GlobalObject::initStandardClasses(JSContext *cx) { - /* Native objects get their reserved slots from birth. */ - JS_ASSERT(numSlots() >= JSSLOT_FREE(getClass())); - JSAtomState &state = cx->runtime->atomState; /* Define a top-level property 'undefined' with the undefined value. */ @@ -307,9 +317,6 @@ GlobalObject::initStandardClasses(JSContext *cx) void GlobalObject::clear(JSContext *cx) { - /* This can return false but that doesn't mean it failed. */ - unbrand(cx); - for (int key = JSProto_Null; key < JSProto_LIMIT * 3; key++) setSlot(key, UndefinedValue()); @@ -334,6 +341,12 @@ GlobalObject::clear(JSContext *cx) int32 flags = getSlot(FLAGS).toInt32(); flags |= FLAGS_CLEARED; setSlot(FLAGS, Int32Value(flags)); + + /* + * Reset the new object cache in the compartment, which assumes that + * prototypes cached on the global object are immutable. 
+ */ + cx->compartment->newObjectCache.reset(); } bool @@ -356,9 +369,9 @@ GlobalObject::isRuntimeCodeGenEnabled(JSContext *cx) JSFunction * GlobalObject::createConstructor(JSContext *cx, Native ctor, Class *clasp, JSAtom *name, - uintN length) + uintN length, gc::AllocKind kind) { - JSFunction *fun = js_NewFunction(cx, NULL, ctor, length, JSFUN_CONSTRUCTOR, this, name); + JSFunction *fun = js_NewFunction(cx, NULL, ctor, length, JSFUN_CONSTRUCTOR, this, name, kind); if (!fun) return NULL; @@ -376,18 +389,10 @@ CreateBlankProto(JSContext *cx, Class *clasp, JSObject &proto, GlobalObject &glo JS_ASSERT(clasp != &ObjectClass); JS_ASSERT(clasp != &FunctionClass); - JSObject *blankProto = NewNonFunction(cx, clasp, &proto, &global); + JSObject *blankProto = NewObjectWithGivenProto(cx, clasp, &proto, &global); if (!blankProto || !blankProto->setSingletonType(cx)) return NULL; - /* - * Supply the created prototype object with an empty shape for the benefit - * of callers of JSObject::initSharingEmptyShape. - */ - types::TypeObject *type = blankProto->getNewType(cx); - if (!type || !type->getEmptyShape(cx, clasp, gc::FINALIZE_OBJECT0)) - return NULL; - return blankProto; } @@ -422,8 +427,6 @@ DefinePropertiesAndBrand(JSContext *cx, JSObject *obj, JSPropertySpec *ps, JSFun { if ((ps && !JS_DefineProperties(cx, obj, ps)) || (fs && !JS_DefineFunctions(cx, obj, fs))) return false; - if (!cx->typeInferenceEnabled()) - obj->brand(cx); return true; } @@ -458,7 +461,7 @@ GlobalObject::getOrCreateDebuggers(JSContext *cx) if (debuggers) return debuggers; - JSObject *obj = NewNonFunction(cx, &GlobalDebuggees_class, NULL, this); + JSObject *obj = NewObjectWithGivenProto(cx, &GlobalDebuggees_class, NULL, this); if (!obj) return NULL; debuggers = cx->new_(); diff --git a/js/src/vm/GlobalObject.h b/js/src/vm/GlobalObject.h index cc890b9d6c46..a93cce68e69d 100644 --- a/js/src/vm/GlobalObject.h +++ b/js/src/vm/GlobalObject.h @@ -190,7 +190,8 @@ class GlobalObject : public ::JSObject { * ctor, a method which creates objects with the given class. 
*/ JSFunction * - createConstructor(JSContext *cx, JSNative ctor, Class *clasp, JSAtom *name, uintN length); + createConstructor(JSContext *cx, JSNative ctor, Class *clasp, JSAtom *name, uintN length, + gc::AllocKind kind = JSFunction::FinalizeKind); /* * Create an object to serve as [[Prototype]] for instances of the given @@ -276,20 +277,16 @@ class GlobalObject : public ::JSObject { HeapValue &v = getSlotRef(GENERATOR_PROTO); if (!v.isObject() && !js_InitIteratorClasses(cx, this)) return NULL; - JS_ASSERT(v.toObject().isGenerator()); return &v.toObject(); } + inline RegExpStatics *getRegExpStatics() const; + JSObject *getThrowTypeError() const { JS_ASSERT(functionObjectClassesInitialized()); return &getSlot(THROWTYPEERROR).toObject(); } - RegExpStatics *getRegExpStatics() const { - JSObject &resObj = getSlot(REGEXP_STATICS).toObject(); - return static_cast(resObj.getPrivate()); - } - void clear(JSContext *cx); bool isCleared() const { @@ -344,6 +341,12 @@ typedef HashSet, SystemAllocPolicy } // namespace js +inline bool +JSObject::isGlobal() const +{ + return !!(js::GetObjectClass(this)->flags & JSCLASS_IS_GLOBAL); +} + js::GlobalObject * JSObject::asGlobal() { diff --git a/js/src/vm/NumberObject-inl.h b/js/src/vm/NumberObject-inl.h index 2b7c1549e2f6..ec687bed944a 100644 --- a/js/src/vm/NumberObject-inl.h +++ b/js/src/vm/NumberObject-inl.h @@ -66,7 +66,7 @@ NumberObject::create(JSContext *cx, jsdouble d) inline NumberObject * NumberObject::createWithProto(JSContext *cx, jsdouble d, JSObject &proto) { - JSObject *obj = NewObjectWithClassProto(cx, &NumberClass, &proto, + JSObject *obj = NewObjectWithClassProto(cx, &NumberClass, &proto, NULL, gc::GetGCObjectKind(RESERVED_SLOTS)); if (!obj) return NULL; diff --git a/js/src/vm/RegExpObject-inl.h b/js/src/vm/RegExpObject-inl.h index 9197da99a9ba..6e7ffef43da7 100644 --- a/js/src/vm/RegExpObject-inl.h +++ b/js/src/vm/RegExpObject-inl.h @@ -154,7 +154,7 @@ RegExpObject::purge(JSContext *cx) { if (RegExpPrivate *rep = getPrivate()) { rep->decref(cx); - privateData = NULL; + setPrivate(NULL); } } @@ -171,26 +171,27 @@ inline bool RegExpObject::init(JSContext *cx, JSLinearString *source, RegExpFlag flags) { if (nativeEmpty()) { - const js::Shape *shape = cx->compartment->initialRegExpShape; - if (!shape) { - shape = assignInitialShape(cx); + if (isDelegate()) { + if (!assignInitialShape(cx)) + return false; + } else { + Shape *shape = assignInitialShape(cx); if (!shape) return false; - cx->compartment->initialRegExpShape = shape; + EmptyShape::insertInitialShape(cx, shape, getProto()); } - setLastProperty(shape); JS_ASSERT(!nativeEmpty()); } DebugOnly atomState = &cx->runtime->atomState; - JS_ASSERT(nativeLookup(cx, ATOM_TO_JSID(atomState->lastIndexAtom))->slot == LAST_INDEX_SLOT); - JS_ASSERT(nativeLookup(cx, ATOM_TO_JSID(atomState->sourceAtom))->slot == SOURCE_SLOT); - JS_ASSERT(nativeLookup(cx, ATOM_TO_JSID(atomState->globalAtom))->slot == GLOBAL_FLAG_SLOT); - JS_ASSERT(nativeLookup(cx, ATOM_TO_JSID(atomState->ignoreCaseAtom))->slot == + JS_ASSERT(nativeLookup(cx, ATOM_TO_JSID(atomState->lastIndexAtom))->slot() == LAST_INDEX_SLOT); + JS_ASSERT(nativeLookup(cx, ATOM_TO_JSID(atomState->sourceAtom))->slot() == SOURCE_SLOT); + JS_ASSERT(nativeLookup(cx, ATOM_TO_JSID(atomState->globalAtom))->slot() == GLOBAL_FLAG_SLOT); + JS_ASSERT(nativeLookup(cx, ATOM_TO_JSID(atomState->ignoreCaseAtom))->slot() == IGNORE_CASE_FLAG_SLOT); - JS_ASSERT(nativeLookup(cx, ATOM_TO_JSID(atomState->multilineAtom))->slot == + JS_ASSERT(nativeLookup(cx, 
ATOM_TO_JSID(atomState->multilineAtom))->slot() == MULTILINE_FLAG_SLOT); - JS_ASSERT(nativeLookup(cx, ATOM_TO_JSID(atomState->stickyAtom))->slot == STICKY_FLAG_SLOT); + JS_ASSERT(nativeLookup(cx, ATOM_TO_JSID(atomState->stickyAtom))->slot() == STICKY_FLAG_SLOT); JS_ASSERT(!getPrivate()); zeroLastIndex(); diff --git a/js/src/vm/RegExpObject.cpp b/js/src/vm/RegExpObject.cpp index 65d23ad60988..83839d76009d 100644 --- a/js/src/vm/RegExpObject.cpp +++ b/js/src/vm/RegExpObject.cpp @@ -107,7 +107,7 @@ RegExpObjectBuilder::getOrCreateClone(RegExpObject *proto) { JS_ASSERT(!reobj_); - JSObject *clone = NewNativeClassInstance(cx, &RegExpClass, proto, proto->getParent()); + JSObject *clone = NewObjectWithGivenProto(cx, &RegExpClass, proto, proto->getParent()); if (!clone) return false; clone->setPrivate(NULL); @@ -278,10 +278,9 @@ RegExpObject::execute(JSContext *cx, const jschar *chars, size_t length, size_t return getPrivate()->execute(cx, chars, length, lastIndex, allocScope, output); } -const Shape * +Shape * RegExpObject::assignInitialShape(JSContext *cx) { - JS_ASSERT(!cx->compartment->initialRegExpShape); JS_ASSERT(isRegExp()); JS_ASSERT(nativeEmpty()); @@ -343,8 +342,10 @@ js_XDRRegExpObject(JSXDRState *xdr, JSObject **objp) if (!reobj) return false; - reobj->clearParent(); - reobj->clearType(); + if (!reobj->clearParent(xdr->cx)) + return false; + if (!reobj->clearType(xdr->cx)) + return false; *objp = reobj; } return true; diff --git a/js/src/vm/RegExpObject.h b/js/src/vm/RegExpObject.h index d2ed1934e920..b4b44b0153e3 100644 --- a/js/src/vm/RegExpObject.h +++ b/js/src/vm/RegExpObject.h @@ -198,7 +198,7 @@ class RegExpObject : public ::JSObject * encoding their initial properties. Return the shape after * changing this regular expression object's last property to it. 
*/ - const Shape *assignInitialShape(JSContext *cx); + Shape *assignInitialShape(JSContext *cx); RegExpObject(); RegExpObject &operator=(const RegExpObject &reo); diff --git a/js/src/vm/RegExpStatics-inl.h b/js/src/vm/RegExpStatics-inl.h index feee5619cf29..95000045b3eb 100644 --- a/js/src/vm/RegExpStatics-inl.h +++ b/js/src/vm/RegExpStatics-inl.h @@ -47,6 +47,13 @@ namespace js { +inline js::RegExpStatics * +js::GlobalObject::getRegExpStatics() const +{ + JSObject &resObj = getSlot(REGEXP_STATICS).toObject(); + return static_cast(resObj.getPrivate()); +} + inline RegExpStatics::RegExpStatics() : bufferLink(NULL), @@ -304,4 +311,10 @@ RegExpStatics::reset(JSContext *cx, JSString *newInput, bool newMultiline) } /* namespace js */ +inline js::RegExpStatics * +JSContext::regExpStatics() +{ + return js::GetGlobalForScopeChain(this)->getRegExpStatics(); +} + #endif diff --git a/js/src/vm/RegExpStatics.cpp b/js/src/vm/RegExpStatics.cpp index 1ed77ecefb27..86f9ba262fb6 100644 --- a/js/src/vm/RegExpStatics.cpp +++ b/js/src/vm/RegExpStatics.cpp @@ -92,12 +92,12 @@ static Class regexp_statics_class = { JSObject * RegExpStatics::create(JSContext *cx, GlobalObject *parent) { - JSObject *obj = NewObject(cx, ®exp_statics_class, NULL, parent); + JSObject *obj = NewObjectWithGivenProto(cx, ®exp_statics_class, NULL, parent); if (!obj) return NULL; RegExpStatics *res = cx->new_(); if (!res) return NULL; - obj->initPrivate(static_cast(res)); + obj->setPrivate(static_cast(res)); return obj; } diff --git a/js/src/vm/Stack-inl.h b/js/src/vm/Stack-inl.h index cf965e9b355f..a6e5587edb73 100644 --- a/js/src/vm/Stack-inl.h +++ b/js/src/vm/Stack-inl.h @@ -54,6 +54,47 @@ namespace js { +/* + * We cache name lookup results only for the global object or for native + * non-global objects without prototype or with prototype that never mutates, + * see bug 462734 and bug 487039. + */ +static inline bool +IsCacheableNonGlobalScope(JSObject *obj) +{ + bool cacheable = (obj->isCall() || obj->isBlock() || obj->isDeclEnv()); + + JS_ASSERT_IF(cacheable, !obj->getOps()->lookupProperty); + return cacheable; +} + +inline JSObject & +StackFrame::scopeChain() const +{ + JS_ASSERT_IF(!(flags_ & HAS_SCOPECHAIN), isFunctionFrame()); + if (!(flags_ & HAS_SCOPECHAIN)) { + scopeChain_ = callee().toFunction()->environment(); + flags_ |= HAS_SCOPECHAIN; + } + return *scopeChain_; +} + +inline JSObject & +StackFrame::varObj() +{ + JSObject *obj = &scopeChain(); + while (!obj->isVarObj()) + obj = obj->scopeChain(); + return *obj; +} + +inline JSCompartment * +StackFrame::compartment() const +{ + JS_ASSERT_IF(isScriptFrame(), scopeChain().compartment() == script()->compartment()); + return scopeChain().compartment(); +} + inline void StackFrame::initPrev(JSContext *cx) { @@ -103,21 +144,20 @@ StackFrame::resetInlinePrev(StackFrame *prevfp, jsbytecode *prevpc) } inline void -StackFrame::initCallFrame(JSContext *cx, JSObject &callee, JSFunction *fun, +StackFrame::initCallFrame(JSContext *cx, JSFunction &callee, JSScript *script, uint32 nactual, StackFrame::Flags flagsArg) { JS_ASSERT((flagsArg & ~(CONSTRUCTING | LOWERED_CALL_APPLY | OVERFLOW_ARGS | UNDERFLOW_ARGS)) == 0); - JS_ASSERT(fun == callee.getFunctionPrivate()); - JS_ASSERT(script == fun->script()); + JS_ASSERT(script == callee.toFunction()->script()); /* Initialize stack frame members. 
*/ flags_ = FUNCTION | HAS_PREVPC | HAS_SCOPECHAIN | flagsArg; - exec.fun = fun; + exec.fun = &callee; args.nactual = nactual; - scopeChain_ = callee.getParent(); + scopeChain_ = callee.toFunction()->environment(); ncode_ = NULL; initPrev(cx); JS_ASSERT(!hasHookData()); @@ -152,8 +192,8 @@ StackFrame::resetCallFrame(JSScript *script) HAS_PREVPC | UNDERFLOW_ARGS; - JS_ASSERT(exec.fun == callee().getFunctionPrivate()); - scopeChain_ = callee().getParent(); + JS_ASSERT(exec.fun->script() == callee().toFunction()->script()); + scopeChain_ = callee().toFunction()->environment(); SetValueRangeToUndefined(slots(), script->nfixed); } @@ -202,6 +242,13 @@ StackFrame::initJitFrameLatePrologue(JSContext *cx, Value **limit) return true; } +inline void +StackFrame::overwriteCallee(JSObject &newCallee) +{ + JS_ASSERT(callee().toFunction()->script() == newCallee.toFunction()->script()); + mutableCalleev().setObject(newCallee); +} + inline Value & StackFrame::canonicalActualArg(uintN i) const { @@ -328,10 +375,10 @@ StackFrame::setScopeChainNoCallObj(JSObject &obj) if (hasCallObj()) { JSObject *pobj = &obj; while (pobj && pobj->getPrivate() != this) - pobj = pobj->getParent(); + pobj = pobj->scopeChain(); JS_ASSERT(pobj); } else { - for (JSObject *pobj = &obj; pobj; pobj = pobj->getParent()) + for (JSObject *pobj = &obj; pobj->isInternalScope(); pobj = pobj->scopeChain()) JS_ASSERT_IF(pobj->isCall(), pobj->getPrivate() != this); } } @@ -356,7 +403,7 @@ StackFrame::callObj() const JSObject *pobj = &scopeChain(); while (JS_UNLIKELY(!pobj->isCall())) - pobj = pobj->getParent(); + pobj = pobj->scopeChain(); return pobj->asCall(); } @@ -423,12 +470,12 @@ StackFrame::markFunctionEpilogueDone() * For function frames, the call object may or may not have have an * enclosing DeclEnv object, so we use the callee's parent, since * it was the initial scope chain. For global (strict) eval frames, - * there is no calle, but the call object's parent is the initial + * there is no callee, but the call object's parent is the initial * scope chain. */ scopeChain_ = isFunctionFrame() - ? callee().getParent() - : scopeChain_->getParent(); + ? callee().toFunction()->environment() + : scopeChain_->internalScopeChain(); flags_ &= ~HAS_CALL_OBJ; } } @@ -513,22 +560,21 @@ ContextStack::getCallFrame(JSContext *cx, MaybeReportError report, const CallArg JS_ALWAYS_INLINE bool ContextStack::pushInlineFrame(JSContext *cx, FrameRegs ®s, const CallArgs &args, - JSObject &callee, JSFunction *fun, JSScript *script, + JSFunction &callee, JSScript *script, InitialFrameFlags initial) { JS_ASSERT(onTop()); JS_ASSERT(regs.sp == args.end()); /* Cannot assert callee == args.callee() since this is called from LeaveTree. */ - JS_ASSERT(callee.getFunctionPrivate() == fun); - JS_ASSERT(fun->script() == script); + JS_ASSERT(script == callee.toFunction()->script()); /*StackFrame::Flags*/ uint32 flags = ToFrameFlags(initial); - StackFrame *fp = getCallFrame(cx, REPORT_ERROR, args, fun, script, &flags); + StackFrame *fp = getCallFrame(cx, REPORT_ERROR, args, &callee, script, &flags); if (!fp) return false; /* Initialize frame, locals, regs. */ - fp->initCallFrame(cx, callee, fun, script, args.length(), (StackFrame::Flags) flags); + fp->initCallFrame(cx, callee, script, args.length(), (StackFrame::Flags) flags); /* * N.B. 
regs may differ from the active registers, if the parent is about @@ -540,10 +586,10 @@ ContextStack::pushInlineFrame(JSContext *cx, FrameRegs ®s, const CallArgs &ar JS_ALWAYS_INLINE bool ContextStack::pushInlineFrame(JSContext *cx, FrameRegs ®s, const CallArgs &args, - JSObject &callee, JSFunction *fun, JSScript *script, + JSFunction &callee, JSScript *script, InitialFrameFlags initial, Value **stackLimit) { - if (!pushInlineFrame(cx, regs, args, callee, fun, script, initial)) + if (!pushInlineFrame(cx, regs, args, callee, script, initial)) return false; *stackLimit = space().conservativeEnd_; return true; @@ -555,7 +601,7 @@ ContextStack::getFixupFrame(JSContext *cx, MaybeReportError report, void *ncode, InitialFrameFlags initial, Value **stackLimit) { JS_ASSERT(onTop()); - JS_ASSERT(args.callee().getFunctionPrivate() == fun); + JS_ASSERT(fun->script() == args.callee().toFunction()->script()); JS_ASSERT(fun->script() == script); /*StackFrame::Flags*/ uint32 flags = ToFrameFlags(initial); diff --git a/js/src/vm/Stack.cpp b/js/src/vm/Stack.cpp index f84af2621686..11a462e1dbee 100644 --- a/js/src/vm/Stack.cpp +++ b/js/src/vm/Stack.cpp @@ -151,7 +151,7 @@ StackFrame::stealFrameAndSlots(Value *vp, StackFrame *otherfp, obj.setPrivate(this); otherfp->flags_ &= ~HAS_CALL_OBJ; if (js_IsNamedLambda(fun())) { - JSObject *env = obj.getParent(); + JSObject *env = obj.internalScopeChain(); JS_ASSERT(env->isDeclEnv()); env->setPrivate(this); } @@ -679,7 +679,7 @@ ContextStack::pushInvokeFrame(JSContext *cx, const CallArgs &args, JS_ASSERT(space().firstUnused() == args.end()); JSObject &callee = args.callee(); - JSFunction *fun = callee.getFunctionPrivate(); + JSFunction *fun = callee.toFunction(); JSScript *script = fun->script(); /*StackFrame::Flags*/ uint32 flags = ToFrameFlags(initial); @@ -687,7 +687,7 @@ ContextStack::pushInvokeFrame(JSContext *cx, const CallArgs &args, if (!fp) return false; - fp->initCallFrame(cx, callee, fun, script, args.length(), (StackFrame::Flags) flags); + fp->initCallFrame(cx, *fun, script, args.length(), (StackFrame::Flags) flags); ifg->regs_.prepareToRun(*fp, script); ifg->prevRegs_ = seg_->pushRegs(ifg->regs_); diff --git a/js/src/vm/Stack.h b/js/src/vm/Stack.h index 5c08ed12b4bd..2628514700e4 100644 --- a/js/src/vm/Stack.h +++ b/js/src/vm/Stack.h @@ -393,7 +393,7 @@ class StackFrame */ /* Used for Invoke, Interpret, trace-jit LeaveTree, and method-jit stubs. */ - void initCallFrame(JSContext *cx, JSObject &callee, JSFunction *fun, + void initCallFrame(JSContext *cx, JSFunction &callee, JSScript *script, uint32 nactual, StackFrame::Flags flags); /* Used for SessionInvoke. */ @@ -766,10 +766,7 @@ class StackFrame * only be changed to something that is equivalent to the current callee in * terms of numFormalArgs etc. Prefer overwriteCallee since it checks. 
*/ - void overwriteCallee(JSObject &newCallee) { - JS_ASSERT(callee().getFunctionPrivate() == newCallee.getFunctionPrivate()); - mutableCalleev().setObject(newCallee); - } + inline void overwriteCallee(JSObject &newCallee); Value &mutableCalleev() const { JS_ASSERT(isFunctionFrame()); @@ -819,14 +816,7 @@ class StackFrame * !fp->hasCall() && fp->scopeChain().isCall() */ - JSObject &scopeChain() const { - JS_ASSERT_IF(!(flags_ & HAS_SCOPECHAIN), isFunctionFrame()); - if (!(flags_ & HAS_SCOPECHAIN)) { - scopeChain_ = callee().getParent(); - flags_ |= HAS_SCOPECHAIN; - } - return *scopeChain_; - } + inline JSObject &scopeChain() const; bool hasCallObj() const { bool ret = !!(flags_ & HAS_CALL_OBJ); @@ -875,12 +865,7 @@ class StackFrame * variables object to collect and discard the script's global variables. */ - JSObject &varObj() { - JSObject *obj = &scopeChain(); - while (!obj->isVarObj()) - obj = obj->getParent(); - return *obj; - } + inline JSObject &varObj(); /* * Frame compartment @@ -889,10 +874,7 @@ class StackFrame * compartment when the frame was pushed. */ - JSCompartment *compartment() const { - JS_ASSERT_IF(isScriptFrame(), scopeChain().compartment() == script()->compartment()); - return scopeChain().compartment(); - } + inline JSCompartment *compartment() const; /* Annotation (will be removed after bug 546848) */ diff --git a/js/src/vm/StackSpace.h b/js/src/vm/StackSpace.h index f5abab80dce7..3b265fc2503c 100644 --- a/js/src/vm/StackSpace.h +++ b/js/src/vm/StackSpace.h @@ -311,10 +311,10 @@ class ContextStack * The 'stackLimit' overload updates 'stackLimit' if it changes. */ bool pushInlineFrame(JSContext *cx, FrameRegs ®s, const CallArgs &args, - JSObject &callee, JSFunction *fun, JSScript *script, + JSFunction &callee, JSScript *script, InitialFrameFlags initial); bool pushInlineFrame(JSContext *cx, FrameRegs ®s, const CallArgs &args, - JSObject &callee, JSFunction *fun, JSScript *script, + JSFunction &callee, JSScript *script, InitialFrameFlags initial, Value **stackLimit); void popInlineFrame(FrameRegs ®s); diff --git a/js/src/vm/String-inl.h b/js/src/vm/String-inl.h index 6fecbf820140..771e6929cbe0 100644 --- a/js/src/vm/String-inl.h +++ b/js/src/vm/String-inl.h @@ -352,7 +352,7 @@ js::StaticStrings::lookup(const jschar *chars, size_t length) } JS_ALWAYS_INLINE void -JSString::finalize(JSContext *cx) +JSString::finalize(JSContext *cx, bool background) { /* Shorts are in a different arena. */ JS_ASSERT(!isShort()); @@ -377,7 +377,7 @@ JSFlatString::finalize(JSRuntime *rt) } inline void -JSShortString::finalize(JSContext *cx) +JSShortString::finalize(JSContext *cx, bool background) { JS_ASSERT(JSString::isShort()); } @@ -393,7 +393,7 @@ JSAtom::finalize(JSRuntime *rt) } inline void -JSExternalString::finalize(JSContext *cx) +JSExternalString::finalize(JSContext *cx, bool background) { if (JSStringFinalizeOp finalizer = str_finalizers[externalType()]) finalizer(cx, this); diff --git a/js/src/vm/String.h b/js/src/vm/String.h index 22db9afc3cb6..d3b129a0005d 100644 --- a/js/src/vm/String.h +++ b/js/src/vm/String.h @@ -403,7 +403,7 @@ class JSString : public js::gc::Cell /* Only called by the GC for strings with the FINALIZE_STRING kind. */ - inline void finalize(JSContext *cx); + inline void finalize(JSContext *cx, bool background); /* Gets the number of bytes that the chars take on the heap. */ @@ -628,7 +628,7 @@ class JSShortString : public JSInlineString /* Only called by the GC for strings with the FINALIZE_EXTERNAL_STRING kind. 
*/ - JS_ALWAYS_INLINE void finalize(JSContext *cx); + JS_ALWAYS_INLINE void finalize(JSContext *cx, bool background); }; JS_STATIC_ASSERT(sizeof(JSShortString) == 2 * sizeof(JSString)); @@ -680,7 +680,7 @@ class JSExternalString : public JSFixedString /* Only called by the GC for strings with the FINALIZE_EXTERNAL_STRING kind. */ - void finalize(JSContext *cx); + void finalize(JSContext *cx, bool background); void finalize(); }; diff --git a/js/src/vm/StringObject-inl.h b/js/src/vm/StringObject-inl.h index b8068546ae44..b1ff071cb133 100644 --- a/js/src/vm/StringObject-inl.h +++ b/js/src/vm/StringObject-inl.h @@ -52,6 +52,30 @@ JSObject::asString() namespace js { +inline bool +StringObject::init(JSContext *cx, JSString *str) +{ + JS_ASSERT(gc::GetGCKindSlots(getAllocKind()) == 2); + + if (nativeEmpty()) { + if (isDelegate()) { + if (!assignInitialShape(cx)) + return false; + } else { + Shape *shape = assignInitialShape(cx); + if (!shape) + return false; + EmptyShape::insertInitialShape(cx, shape, getProto()); + } + } + + JS_ASSERT(!nativeEmpty()); + JS_ASSERT(nativeLookup(cx, ATOM_TO_JSID(cx->runtime->atomState.lengthAtom))->slot() == LENGTH_SLOT); + + setStringThis(str); + return true; +} + inline StringObject * StringObject::create(JSContext *cx, JSString *str) { @@ -67,8 +91,7 @@ StringObject::create(JSContext *cx, JSString *str) inline StringObject * StringObject::createWithProto(JSContext *cx, JSString *str, JSObject &proto) { - JS_ASSERT(gc::FINALIZE_OBJECT2 == gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(&StringClass))); - JSObject *obj = NewObjectWithClassProto(cx, &StringClass, &proto, gc::FINALIZE_OBJECT2); + JSObject *obj = NewObjectWithClassProto(cx, &StringClass, &proto, NULL); if (!obj) return NULL; StringObject *strobj = obj->asString(); diff --git a/js/src/vm/StringObject.h b/js/src/vm/StringObject.h index d7a7ae6821b6..cc25583a5dc9 100644 --- a/js/src/vm/StringObject.h +++ b/js/src/vm/StringObject.h @@ -92,7 +92,7 @@ class StringObject : public ::JSObject * encodes the initial length property. Return the shape after changing * this String object's last property to it. */ - const js::Shape *assignInitialShape(JSContext *cx); + Shape *assignInitialShape(JSContext *cx); private: StringObject(); diff --git a/js/xpconnect/idl/xpccomponents.idl b/js/xpconnect/idl/xpccomponents.idl index 9ee3b58e6266..973169eded18 100644 --- a/js/xpconnect/idl/xpccomponents.idl +++ b/js/xpconnect/idl/xpccomponents.idl @@ -153,7 +153,7 @@ interface ScheduledGCCallback : nsISupports /** * interface of Components.utils */ -[scriptable, uuid(d6916b9e-0947-400f-8552-81fd96312c9d)] +[scriptable, uuid(1fbd5d17-707f-4874-9635-59ad2438caf0)] interface nsIXPCComponents_Utils : nsISupports { @@ -288,7 +288,8 @@ interface nsIXPCComponents_Utils : nsISupports * @param obj The JavaScript object whose global is to be gotten. * @return the corresponding global. */ - void /* JSObject */ getGlobalForObject(/* in JSObject obj */); + [implicit_jscontext] + jsval getGlobalForObject(in jsval obj); /* * To be called from JS only. @@ -318,13 +319,27 @@ interface nsIXPCComponents_Utils : nsISupports * NB: Assume that getting any of these attributes is relatively * cheap, but setting any of them is relatively expensive. 
*/ + [implicit_jscontext] attribute boolean strict; + + [implicit_jscontext] attribute boolean werror; + + [implicit_jscontext] attribute boolean atline; + + [implicit_jscontext] attribute boolean xml; + + [implicit_jscontext] attribute boolean relimit; + + [implicit_jscontext] attribute boolean methodjit; + + [implicit_jscontext] attribute boolean methodjit_always; + [implicit_jscontext] void setGCZeal(in long zeal); }; diff --git a/js/xpconnect/idl/xpcjsid.idl b/js/xpconnect/idl/xpcjsid.idl index 1f9c7f5e3cbf..2c5e52dad979 100644 --- a/js/xpconnect/idl/xpcjsid.idl +++ b/js/xpconnect/idl/xpcjsid.idl @@ -1,6 +1,5 @@ -/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- - * - * ***** BEGIN LICENSE BLOCK ***** +/* -*- Mode: IDL; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- */ +/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Mozilla Public License Version @@ -65,11 +64,11 @@ interface nsIJSIID : nsIJSID { }; -[scriptable, uuid(26b2a374-6eaf-46d4-acaf-1c6be152d36b)] +[scriptable, uuid(bf5eb086-9eaa-4694-aec3-fe4aac6119bd)] interface nsIJSCID : nsIJSID { - nsISupports createInstance(); - nsISupports getService(); + [implicit_jscontext,optional_argc] jsval createInstance([optional] in jsval iid); + [implicit_jscontext,optional_argc] jsval getService([optional] in jsval iid); }; /* this goes into the C++ header verbatim. */ diff --git a/js/xpconnect/src/XPCComponents.cpp b/js/xpconnect/src/XPCComponents.cpp index b13d0a380492..58e155464998 100644 --- a/js/xpconnect/src/XPCComponents.cpp +++ b/js/xpconnect/src/XPCComponents.cpp @@ -3813,57 +3813,21 @@ nsXPCComponents_Utils::NondeterministicGetWeakMapKeys(const jsval &aMap, /* void getGlobalForObject(); */ NS_IMETHODIMP -nsXPCComponents_Utils::GetGlobalForObject() +nsXPCComponents_Utils::GetGlobalForObject(const JS::Value& object, + JSContext *cx, + JS::Value *retval) { - nsresult rv; - nsCOMPtr xpc(do_GetService(nsIXPConnect::GetCID(), &rv)); - if (NS_FAILED(rv)) - return NS_ERROR_FAILURE; - - // get the xpconnect native call context - nsAXPCNativeCallContext *cc = nsnull; - xpc->GetCurrentNativeCallContext(&cc); - if (!cc) - return NS_ERROR_FAILURE; - - // Get JSContext of current call - JSContext* cx; - rv = cc->GetJSContext(&cx); - if (NS_FAILED(rv) || !cx) - return NS_ERROR_FAILURE; - - // get place for return value - jsval *rval = nsnull; - rv = cc->GetRetValPtr(&rval); - if (NS_FAILED(rv) || !rval) - return NS_ERROR_FAILURE; - - // get argc and argv and verify arg count - PRUint32 argc; - rv = cc->GetArgc(&argc); - if (NS_FAILED(rv)) - return NS_ERROR_FAILURE; - - if (argc != 1) - return NS_ERROR_XPC_NOT_ENOUGH_ARGS; - - jsval* argv; - rv = cc->GetArgvPtr(&argv); - if (NS_FAILED(rv) || !argv) - return NS_ERROR_FAILURE; - - // first argument must be an object - if (JSVAL_IS_PRIMITIVE(argv[0])) + // First argument must be an object. + if (JSVAL_IS_PRIMITIVE(object)) return NS_ERROR_XPC_BAD_CONVERT_JS; - JSObject *obj = JS_GetGlobalForObject(cx, JSVAL_TO_OBJECT(argv[0])); - *rval = OBJECT_TO_JSVAL(obj); + JSObject *obj = JS_GetGlobalForObject(cx, JSVAL_TO_OBJECT(object)); + *retval = OBJECT_TO_JSVAL(obj); // Outerize if necessary. 
if (JSObjectOp outerize = js::GetObjectClass(obj)->ext.outerObject) - *rval = OBJECT_TO_JSVAL(outerize(cx, obj)); + *retval = OBJECT_TO_JSVAL(outerize(cx, obj)); - cc->SetReturnValueWasSet(true); return NS_OK; } @@ -3899,9 +3863,7 @@ nsXPCComponents_Utils::CreateObjectIn(const jsval &vobj, JSContext *cx, jsval *r JSBool FunctionWrapper(JSContext *cx, uintN argc, jsval *vp) { - jsval v; - if (!JS_GetReservedSlot(cx, JSVAL_TO_OBJECT(JS_CALLEE(cx, vp)), 0, &v)) - return false; + jsval v = js::GetFunctionNativeReserved(JSVAL_TO_OBJECT(JS_CALLEE(cx, vp)), 0); NS_ASSERTION(JSVAL_IS_OBJECT(v), "weird function"); JSObject *obj = JS_THIS_OBJECT(cx, vp); @@ -3914,14 +3876,13 @@ FunctionWrapper(JSContext *cx, uintN argc, jsval *vp) JSBool WrapCallable(JSContext *cx, JSObject *obj, jsid id, JSObject *propobj, jsval *vp) { - JSFunction *fun = JS_NewFunctionById(cx, FunctionWrapper, 0, 0, - JS_GetGlobalForObject(cx, obj), id); + JSFunction *fun = js::NewFunctionByIdWithReserved(cx, FunctionWrapper, 0, 0, + JS_GetGlobalForObject(cx, obj), id); if (!fun) return false; JSObject *funobj = JS_GetFunctionObject(fun); - if (!JS_SetReservedSlot(cx, funobj, 0, OBJECT_TO_JSVAL(propobj))) - return false; + js::SetFunctionNativeReserved(funobj, 0, OBJECT_TO_JSVAL(propobj)); *vp = OBJECT_TO_JSVAL(funobj); return true; } @@ -4025,46 +3986,16 @@ SetBoolOption(JSContext* cx, uint32 aOption, bool aValue) return NS_OK; } -// FIXME/bug 671453: work around broken [implicit_jscontext] -nsresult -GetCurrentJSContext(JSContext** aCx) -{ - nsresult rv; - - nsCOMPtr xpc(do_GetService(nsIXPConnect::GetCID(), &rv)); - if(NS_FAILED(rv)) - return rv; - - // get the xpconnect native call context - nsAXPCNativeCallContext *cc = nsnull; - xpc->GetCurrentNativeCallContext(&cc); - if(!cc) - return NS_ERROR_FAILURE; - - // Get JSContext of current call - JSContext* cx; - rv = cc->GetJSContext(&cx); - if(NS_FAILED(rv) || !cx) - return NS_ERROR_FAILURE; - - *aCx = cx; - return NS_OK; -} - #define GENERATE_JSOPTION_GETTER_SETTER(_attr, _flag) \ NS_IMETHODIMP \ - nsXPCComponents_Utils::Get## _attr(bool* aValue) \ + nsXPCComponents_Utils::Get## _attr(JSContext* cx, bool* aValue) \ { \ - JSContext* cx; \ - nsresult rv = GetCurrentJSContext(&cx); \ - return NS_FAILED(rv) ? rv : GetBoolOption(cx, _flag, aValue); \ + return GetBoolOption(cx, _flag, aValue); \ } \ NS_IMETHODIMP \ - nsXPCComponents_Utils::Set## _attr(bool aValue) \ + nsXPCComponents_Utils::Set## _attr(JSContext* cx, bool aValue) \ { \ - JSContext* cx; \ - nsresult rv = GetCurrentJSContext(&cx); \ - return NS_FAILED(rv) ? rv : SetBoolOption(cx, _flag, aValue); \ + return SetBoolOption(cx, _flag, aValue); \ } GENERATE_JSOPTION_GETTER_SETTER(Strict, JSOPTION_STRICT) diff --git a/js/xpconnect/src/XPCConvert.cpp b/js/xpconnect/src/XPCConvert.cpp index b86460bd5637..d16d22c3a8f8 100644 --- a/js/xpconnect/src/XPCConvert.cpp +++ b/js/xpconnect/src/XPCConvert.cpp @@ -58,6 +58,8 @@ #include "dombindings.h" #include "nsWrapperCacheInlines.h" +#include "jstypedarray.h" + using namespace mozilla; //#define STRICT_CHECK_OF_UNICODE @@ -1036,6 +1038,9 @@ XPCConvert::NativeInterface2JSObject(XPCLazyCallContext& lccx, return true; } + if (JS_IsExceptionPending(cx)) + return false; + // Even if ConstructSlimWrapper returns false it might have created a // wrapper (while calling the PreCreate hook). 
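Removing the GetCurrentJSContext workaround (the FIXME for bug 671453) and reworking GENERATE_JSOPTION_GETTER_SETTER means each option attribute declared on nsIXPCComponents_Utils now receives the caller's JSContext via [implicit_jscontext] and simply reads or writes the matching JSOPTION_* flag. A short chrome-JS sketch of the script-facing behaviour:

```js
// Each attribute maps onto a JSOPTION_* flag of the caller's JSContext
// (e.g. Components.utils.strict <-> JSOPTION_STRICT).
const Cu = Components.utils;

var wasStrict = Cu.strict;   // cheap read of the current flag
Cu.strict = true;            // extra strict warnings for subsequent code
try {
  // ... run code under the stricter options ...
} finally {
  Cu.strict = wasStrict;     // setting is the comparatively expensive direction
}
```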
In that case we need to
 // fall through because we either have a slim wrapper that needs to be
@@ -1686,13 +1691,169 @@ failure:
 #undef POPULATE
 }
+
+
+// Check that the tag part of the type matches the type
+// of the array. If the check succeeds, check that the size
+// of the output does not exceed PR_UINT32_MAX bytes. Allocate
+// the memory and copy the elements by memcpy.
+static JSBool
+CheckTargetAndPopulate(const nsXPTType& type,
+                       PRUint8 requiredType,
+                       size_t typeSize,
+                       JSUint32 count,
+                       JSObject* tArr,
+                       void** output,
+                       nsresult* pErr)
+{
+    // Check that the element type expected by the interface matches
+    // the type of the elements in the typed array exactly, including
+    // signedness.
+    if (type.TagPart() != requiredType) {
+        if (pErr)
+            *pErr = NS_ERROR_XPC_BAD_CONVERT_JS;
+
+        return false;
+    }
+
+    // Calculate the maximum number of elements that can fit in
+    // PR_UINT32_MAX bytes.
+    size_t max = PR_UINT32_MAX / typeSize;
+
+    // This could overflow on 32-bit systems so check max first.
+    size_t byteSize = count * typeSize;
+    if (count > max || !(*output = nsMemory::Alloc(byteSize))) {
+        if (pErr)
+            *pErr = NS_ERROR_OUT_OF_MEMORY;
+
+        return false;
+    }
+
+    memcpy(*output, JS_GetTypedArrayData(tArr), byteSize);
+    return true;
+}
+
+// Fast conversion of typed arrays to native using memcpy.
+// No float or double canonicalization is done. Called by
+// JSArray2Native whenever a TypedArray is met. ArrayBuffers
+// are not accepted; create a properly typed array view on them
+// first. The element type of the array must match the XPCOM
+// type in size, type and signedness exactly. As an exception,
+// Uint8ClampedArray is allowed for arrays of uint8.
+
+// static
+JSBool
+XPCConvert::JSTypedArray2Native(XPCCallContext& ccx,
+                                void** d,
+                                JSObject* jsArray,
+                                JSUint32 count,
+                                const nsXPTType& type,
+                                nsresult* pErr)
+{
+    NS_ABORT_IF_FALSE(jsArray, "bad param");
+    NS_ABORT_IF_FALSE(d, "bad param");
+    NS_ABORT_IF_FALSE(js_IsTypedArray(jsArray), "not a typed array");
+
+    // Check the actual length of the input array against the
+    // given size_is.
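CheckTargetAndPopulate is deliberately strict: the typed array's element tag must equal the XPCOM element type exactly, signedness included, and the byte size is checked against PR_UINT32_MAX before anything is allocated, so an oversized count fails cleanly instead of overflowing on 32-bit builds. From script, a mismatch surfaces as a conversion error, which the new tests further down exercise. A sketch in the style of those test_params.js additions (doIsTest, doTypedArrayMismatchTest, arrayComparator and standardComparator are helpers defined in that file):

```js
// Exact element-type match (Int16Array vs. the IDL's short): converted via memcpy.
doIsTest("testShortArray", new Int16Array([-3, 7, 4]), 3,
         new Int16Array([1, -32, 6]), 3, arrayComparator(standardComparator));

// Signedness mismatch (Uint16Array vs. short): rejected with BAD_CONVERT_JS.
doTypedArrayMismatchTest("testShortArray", new Uint16Array([0, 7, 4]), 3,
                         new Uint16Array([1, 5, 6]), 3);
```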
+ JSUint32 len = JS_GetTypedArrayLength(jsArray); + if (len < count) { + if (pErr) + *pErr = NS_ERROR_XPC_NOT_ENOUGH_ELEMENTS_IN_ARRAY; + + return false; + } + + void* output = nsnull; + + switch (JS_GetTypedArrayType(jsArray)) { + case js::TypedArray::TYPE_INT8: + if (!CheckTargetAndPopulate(nsXPTType::T_I8, type, + sizeof(int8), count, + jsArray, &output, pErr)) { + return false; + } + break; + + case js::TypedArray::TYPE_UINT8: + case js::TypedArray::TYPE_UINT8_CLAMPED: + if (!CheckTargetAndPopulate(nsXPTType::T_U8, type, + sizeof(uint8), count, + jsArray, &output, pErr)) { + return false; + } + break; + + case js::TypedArray::TYPE_INT16: + if (!CheckTargetAndPopulate(nsXPTType::T_I16, type, + sizeof(int16), count, + jsArray, &output, pErr)) { + return false; + } + break; + + case js::TypedArray::TYPE_UINT16: + if (!CheckTargetAndPopulate(nsXPTType::T_U16, type, + sizeof(uint16), count, + jsArray, &output, pErr)) { + return false; + } + break; + + case js::TypedArray::TYPE_INT32: + if (!CheckTargetAndPopulate(nsXPTType::T_I32, type, + sizeof(int32), count, + jsArray, &output, pErr)) { + return false; + } + break; + + case js::TypedArray::TYPE_UINT32: + if (!CheckTargetAndPopulate(nsXPTType::T_U32, type, + sizeof(uint32), count, + jsArray, &output, pErr)) { + return false; + } + break; + + case js::TypedArray::TYPE_FLOAT32: + if (!CheckTargetAndPopulate(nsXPTType::T_FLOAT, type, + sizeof(float), count, + jsArray, &output, pErr)) { + return false; + } + break; + + case js::TypedArray::TYPE_FLOAT64: + if (!CheckTargetAndPopulate(nsXPTType::T_DOUBLE, type, + sizeof(double), count, + jsArray, &output, pErr)) { + return false; + } + break; + + // Yet another array type was defined? It is not supported yet... + default: + if (pErr) + *pErr = NS_ERROR_XPC_BAD_CONVERT_JS; + + return false; + } + + *d = output; + if (pErr) + *pErr = NS_OK; + + return true; +} + // static JSBool XPCConvert::JSArray2Native(XPCCallContext& ccx, void** d, jsval s, JSUint32 count, const nsXPTType& type, - const nsID* iid, uintN* pErr) + const nsID* iid, nsresult* pErr) { - NS_PRECONDITION(d, "bad param"); + NS_ABORT_IF_FALSE(d, "bad param"); JSContext* cx = ccx.GetJSContext(); @@ -1728,13 +1889,19 @@ XPCConvert::JSArray2Native(XPCCallContext& ccx, void** d, jsval s, } jsarray = JSVAL_TO_OBJECT(s); + + // If this is a typed array, then do a fast conversion with memcpy. 
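JSArray2Native now sniffs for typed arrays up front and hands them to JSTypedArray2Native, so views that previously fell through to the JS_IsArrayObject check and were rejected are now copied with a single memcpy. As the comment above notes, a bare ArrayBuffer is still rejected; callers pass a typed view on it. The sketch below mirrors the new coverage added to test_params.js later in this patch:

```js
// An [array, size_is] parameter takes a typed-array *view*, not the raw ArrayBuffer.
var arrayBuffer = new ArrayBuffer(16);
var int16Array = new Int16Array(arrayBuffer, 2, 3);   // 3 int16 elements at byte offset 2
int16Array.set([-32768, 0, 32767]);

doIsTest("testShortArray", int16Array, 3,
         new Int16Array([1773, -32768, 32767, 7]), 4, arrayComparator(standardComparator));
```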
+ if (js_IsTypedArray(jsarray)) { + return JSTypedArray2Native(ccx, d, jsarray, count, type, pErr); + } + if (!JS_IsArrayObject(cx, jsarray)) { if (pErr) *pErr = NS_ERROR_XPC_CANT_CONVERT_OBJECT_TO_ARRAY; return false; } - jsuint len; + JSUint32 len; if (!JS_GetArrayLength(cx, jsarray, &len) || len < count) { if (pErr) *pErr = NS_ERROR_XPC_NOT_ENOUGH_ELEMENTS_IN_ARRAY; @@ -1778,7 +1945,7 @@ XPCConvert::JSArray2Native(XPCCallContext& ccx, void** d, jsval s, case nsXPTType::T_U64 : POPULATE(na, uint64); break; case nsXPTType::T_FLOAT : POPULATE(na, float); break; case nsXPTType::T_DOUBLE : POPULATE(na, double); break; - case nsXPTType::T_BOOL : POPULATE(na, bool); break; + case nsXPTType::T_BOOL : POPULATE(na, bool); break; case nsXPTType::T_CHAR : POPULATE(na, char); break; case nsXPTType::T_WCHAR : POPULATE(na, jschar); break; case nsXPTType::T_VOID : NS_ERROR("bad type"); goto failure; @@ -1871,7 +2038,7 @@ XPCConvert::NativeStringWithSize2JS(JSContext* cx, JSBool XPCConvert::JSStringWithSize2Native(XPCCallContext& ccx, void* d, jsval s, JSUint32 count, const nsXPTType& type, - uintN* pErr) + nsresult* pErr) { NS_PRECONDITION(!JSVAL_IS_NULL(s), "bad param"); NS_PRECONDITION(d, "bad param"); diff --git a/js/xpconnect/src/XPCJSID.cpp b/js/xpconnect/src/XPCJSID.cpp index 540d14a96c62..fba9f6a69d81 100644 --- a/js/xpconnect/src/XPCJSID.cpp +++ b/js/xpconnect/src/XPCJSID.cpp @@ -679,14 +679,13 @@ nsJSCID::NewID(const char* str) } static const nsID* -GetIIDArg(PRUint32 argc, jsval* argv, JSContext* cx) +GetIIDArg(PRUint32 argc, const JS::Value& val, JSContext* cx) { const nsID* iid; // If an IID was passed in then use it if (argc) { JSObject* iidobj; - jsval val = *argv; if (JSVAL_IS_PRIMITIVE(val) || !(iidobj = JSVAL_TO_OBJECT(val)) || !(iid = xpc_JSObjectToID(cx, iidobj))) { @@ -698,39 +697,39 @@ GetIIDArg(PRUint32 argc, jsval* argv, JSContext* cx) return iid; } +static JSObject* +GetWrapperObject() +{ + nsXPConnect* xpc = nsXPConnect::GetXPConnect(); + if (!xpc) + return NULL; + + nsAXPCNativeCallContext *ccxp = NULL; + xpc->GetCurrentNativeCallContext(&ccxp); + if (!ccxp) + return NULL; + + nsCOMPtr wrapper; + ccxp->GetCalleeWrapper(getter_AddRefs(wrapper)); + JSObject* obj; + wrapper->GetJSObject(&obj); + return obj; +} + /* nsISupports createInstance (); */ NS_IMETHODIMP -nsJSCID::CreateInstance(nsISupports **_retval) +nsJSCID::CreateInstance(const JS::Value& iidval, JSContext* cx, + PRUint8 optionalArgc, JS::Value* retval) { if (!mDetails.IsValid()) return NS_ERROR_XPC_BAD_CID; - nsXPConnect* xpc = nsXPConnect::GetXPConnect(); - if (!xpc) + JSObject* obj = GetWrapperObject(); + if (!obj) { return NS_ERROR_UNEXPECTED; - - nsAXPCNativeCallContext *ccxp = nsnull; - xpc->GetCurrentNativeCallContext(&ccxp); - if (!ccxp) - return NS_ERROR_UNEXPECTED; - - PRUint32 argc; - jsval * argv; - jsval * vp; - JSContext* cx; - JSObject* obj; - - ccxp->GetJSContext(&cx); - ccxp->GetArgc(&argc); - ccxp->GetArgvPtr(&argv); - ccxp->GetRetValPtr(&vp); - - nsCOMPtr wrapper; - ccxp->GetCalleeWrapper(getter_AddRefs(wrapper)); - wrapper->GetJSObject(&obj); + } // Do the security check if necessary - XPCContext* xpcc = XPCContext::GetXPCContext(cx); nsIXPCSecurityManager* sm; @@ -741,7 +740,7 @@ nsJSCID::CreateInstance(nsISupports **_retval) } // If an IID was passed in then use it - const nsID* iid = GetIIDArg(argc, argv, cx); + const nsID* iid = GetIIDArg(optionalArgc, iidval, cx); if (!iid) return NS_ERROR_XPC_BAD_IID; @@ -757,46 +756,26 @@ nsJSCID::CreateInstance(nsISupports **_retval) if (NS_FAILED(rv) || 
!inst) return NS_ERROR_XPC_CI_RETURNED_FAILURE; - rv = xpc->WrapNativeToJSVal(cx, obj, inst, nsnull, iid, true, vp, nsnull); - if (NS_FAILED(rv) || JSVAL_IS_PRIMITIVE(*vp)) + rv = nsXPConnect::GetXPConnect()->WrapNativeToJSVal(cx, obj, inst, nsnull, iid, true, retval, nsnull); + if (NS_FAILED(rv) || JSVAL_IS_PRIMITIVE(*retval)) return NS_ERROR_XPC_CANT_CREATE_WN; - ccxp->SetReturnValueWasSet(true); return NS_OK; } /* nsISupports getService (); */ NS_IMETHODIMP -nsJSCID::GetService(nsISupports **_retval) +nsJSCID::GetService(const JS::Value& iidval, JSContext* cx, + PRUint8 optionalArgc, JS::Value* retval) { if (!mDetails.IsValid()) return NS_ERROR_XPC_BAD_CID; - nsXPConnect* xpc = nsXPConnect::GetXPConnect(); - if (!xpc) + JSObject* obj = GetWrapperObject(); + if (!obj) { return NS_ERROR_UNEXPECTED; - - nsAXPCNativeCallContext *ccxp = nsnull; - xpc->GetCurrentNativeCallContext(&ccxp); - if (!ccxp) - return NS_ERROR_UNEXPECTED; - - PRUint32 argc; - jsval * argv; - jsval * vp; - JSContext* cx; - JSObject* obj; - - ccxp->GetJSContext(&cx); - ccxp->GetArgc(&argc); - ccxp->GetArgvPtr(&argv); - ccxp->GetRetValPtr(&vp); - - nsCOMPtr wrapper; - ccxp->GetCalleeWrapper(getter_AddRefs(wrapper)); - wrapper->GetJSObject(&obj); + } // Do the security check if necessary - XPCContext* xpcc = XPCContext::GetXPCContext(cx); nsIXPCSecurityManager* sm; @@ -808,7 +787,7 @@ nsJSCID::GetService(nsISupports **_retval) } // If an IID was passed in then use it - const nsID* iid = GetIIDArg(argc, argv, cx); + const nsID* iid = GetIIDArg(optionalArgc, iidval, cx); if (!iid) return NS_ERROR_XPC_BAD_IID; @@ -825,12 +804,11 @@ nsJSCID::GetService(nsISupports **_retval) JSObject* instJSObj; nsCOMPtr holder; - rv = xpc->WrapNative(cx, obj, srvc, *iid, getter_AddRefs(holder)); + rv = nsXPConnect::GetXPConnect()->WrapNative(cx, obj, srvc, *iid, getter_AddRefs(holder)); if (NS_FAILED(rv) || !holder || NS_FAILED(holder->GetJSObject(&instJSObj))) return NS_ERROR_XPC_CANT_CREATE_WN; - *vp = OBJECT_TO_JSVAL(instJSObj); - ccxp->SetReturnValueWasSet(true); + *retval = OBJECT_TO_JSVAL(instJSObj); return NS_OK; } diff --git a/js/xpconnect/src/XPCJSRuntime.cpp b/js/xpconnect/src/XPCJSRuntime.cpp index 99ba6592c4e0..d599b22992f4 100644 --- a/js/xpconnect/src/XPCJSRuntime.cpp +++ b/js/xpconnect/src/XPCJSRuntime.cpp @@ -48,6 +48,7 @@ #include "WrapperFactory.h" #include "dom_quickstubs.h" +#include "jscompartment.h" #include "jsgcchunk.h" #include "jsscope.h" #include "nsIMemoryReporter.h" @@ -531,7 +532,7 @@ static PLDHashOperator SuspectDOMExpandos(nsPtrHashKey *expando, void *arg) { Closure *closure = static_cast(arg); - closure->cb->NoteXPCOMRoot(static_cast(expando->GetKey()->getPrivate())); + closure->cb->NoteXPCOMRoot(static_cast(js::GetObjectPrivate(expando->GetKey()))); return PL_DHASH_NEXT; } @@ -1233,6 +1234,8 @@ CompartmentCallback(JSContext *cx, void *vdata, JSCompartment *compartment) #endif JS_GetTypeInferenceMemoryStats(cx, compartment, &curr->typeInferenceMemory, MemoryReporterMallocSizeOf); + curr->shapesCompartmentTables = + js::GetCompartmentShapeTableSize(compartment, MemoryReporterMallocSizeOf); } void @@ -1272,12 +1275,12 @@ CellCallback(JSContext *cx, void *vdata, void *thing, JSGCTraceKind traceKind, case JSTRACE_OBJECT: { JSObject *obj = static_cast(thing); - if (obj->isFunction()) { + if (JS_ObjectIsFunction(cx, obj)) { curr->gcHeapObjectsFunction += thingSize; } else { curr->gcHeapObjectsNonFunction += thingSize; } - curr->objectSlots += obj->sizeOfSlotsArray(MemoryReporterMallocSizeOf); + curr->objectSlots 
+= js::GetObjectDynamicSlotSize(obj, MemoryReporterMallocSizeOf); break; } case JSTRACE_STRING: @@ -1293,16 +1296,21 @@ CellCallback(JSContext *cx, void *vdata, void *thing, JSGCTraceKind traceKind, if (shape->inDictionary()) { curr->gcHeapShapesDict += thingSize; curr->shapesExtraDictTables += - shape->sizeOfPropertyTableIncludingThis(MemoryReporterMallocSizeOf); + shape->sizeOfPropertyTable(MemoryReporterMallocSizeOf); } else { curr->gcHeapShapesTree += thingSize; curr->shapesExtraTreeTables += - shape->sizeOfPropertyTableIncludingThis(MemoryReporterMallocSizeOf); + shape->sizeOfPropertyTable(MemoryReporterMallocSizeOf); curr->shapesExtraTreeShapeKids += - shape->sizeOfKidsIncludingThis(MemoryReporterMallocSizeOf); + shape->sizeOfKids(MemoryReporterMallocSizeOf); } break; } + case JSTRACE_BASE_SHAPE: + { + curr->gcHeapShapesBase += thingSize; + break; + } case JSTRACE_SCRIPT: { JSScript *script = static_cast(thing); @@ -1602,9 +1610,10 @@ CollectCompartmentStatsForRuntime(JSRuntime *rt, IterateData *data) stats.objectSlots; data->totalShapes += stats.gcHeapShapesTree + stats.gcHeapShapesDict + + stats.gcHeapShapesBase + stats.shapesExtraTreeTables + stats.shapesExtraDictTables + - stats.typeInferenceMemory.emptyShapes; + stats.shapesCompartmentTables; data->totalScripts += stats.gcHeapScripts + stats.scriptData; data->totalStrings += stats.gcHeapStrings + @@ -1718,6 +1727,13 @@ ReportCompartmentStats(const CompartmentStats &stats, "shapes that are in dictionary mode.", callback, closure); + ReportMemoryBytes0(MakeMemoryReporterPath(pathPrefix, stats.name, + "gc-heap/shapes/base"), + JS_GC_HEAP_KIND, stats.gcHeapShapesBase, + "Memory on the compartment's garbage-collected JavaScript heap that collates " + "data common to many shapes.", + callback, closure); + ReportMemoryBytes0(MakeMemoryReporterPath(pathPrefix, stats.name, "gc-heap/type-objects"), JS_GC_HEAP_KIND, stats.gcHeapTypeObjects, @@ -1774,11 +1790,10 @@ ReportCompartmentStats(const CompartmentStats &stats, callback, closure); ReportMemoryBytes0(MakeMemoryReporterPath(pathPrefix, stats.name, - "shapes-extra/empty-shape-arrays"), - nsIMemoryReporter::KIND_HEAP, - stats.typeInferenceMemory.emptyShapes, - "Memory used for arrays attached to prototype JS objects managing shape " - "information.", + "shapes-extra/compartment-tables"), + nsIMemoryReporter::KIND_HEAP, stats.shapesCompartmentTables, + "Memory used by compartment wide tables storing shape information " + "for use during object construction.", callback, closure); ReportMemoryBytes0(MakeMemoryReporterPath(pathPrefix, stats.name, @@ -2001,6 +2016,7 @@ public: nsIMemoryReporter::KIND_OTHER, data.totalShapes, "Memory used for all shape-related data. 
This is the sum of all " "compartments' 'gc-heap/shapes/tree', 'gc-heap/shapes/dict', " + "'gc-heap/shapes/base', " "'shapes-extra/tree-tables', 'shapes-extra/dict-tables', " "'shapes-extra/tree-shape-kids' and 'shapes-extra/empty-shape-arrays'.", callback, closure); diff --git a/js/xpconnect/src/XPCQuickStubs.cpp b/js/xpconnect/src/XPCQuickStubs.cpp index 5620c03c6fb2..2521f62fbaea 100644 --- a/js/xpconnect/src/XPCQuickStubs.cpp +++ b/js/xpconnect/src/XPCQuickStubs.cpp @@ -147,15 +147,13 @@ PropertyOpForwarder(JSContext *cx, uintN argc, jsval *vp) JSObject *obj = JS_THIS_OBJECT(cx, vp); if (!obj) return false; - jsval v; - if (!JS_GetReservedSlot(cx, callee, 0, &v)) - return false; + jsval v = js::GetFunctionNativeReserved(callee, 0); + JSObject *ptrobj = JSVAL_TO_OBJECT(v); Op *popp = static_cast(JS_GetPrivate(cx, ptrobj)); - if (!JS_GetReservedSlot(cx, callee, 1, &v)) - return false; + v = js::GetFunctionNativeReserved(callee, 1); jsval argval = (argc > 0) ? JS_ARGV(cx, vp)[0] : JSVAL_VOID; jsid id; @@ -187,7 +185,7 @@ GeneratePropertyOp(JSContext *cx, JSObject *obj, jsid id, uintN argc, Op pop) // The JS engine provides two reserved slots on function objects for // XPConnect to use. Use them to stick the necessary info here. JSFunction *fun = - JS_NewFunctionById(cx, PropertyOpForwarder, argc, 0, obj, id); + js::NewFunctionByIdWithReserved(cx, PropertyOpForwarder, argc, 0, obj, id); if (!fun) return false; @@ -206,8 +204,8 @@ GeneratePropertyOp(JSContext *cx, JSObject *obj, jsid id, uintN argc, Op pop) *popp = pop; JS_SetPrivate(cx, ptrobj, popp); - JS_SetReservedSlot(cx, funobj, 0, OBJECT_TO_JSVAL(ptrobj)); - JS_SetReservedSlot(cx, funobj, 1, js::IdToJsval(id)); + js::SetFunctionNativeReserved(funobj, 0, OBJECT_TO_JSVAL(ptrobj)); + js::SetFunctionNativeReserved(funobj, 1, js::IdToJsval(id)); return funobj; } @@ -508,7 +506,7 @@ GetMethodInfo(JSContext *cx, jsval *vp, const char **ifaceNamep, jsid *memberIdp JSObject *funobj = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp)); NS_ASSERTION(JS_ObjectIsFunction(cx, funobj), "JSNative callee should be Function object"); - JSString *str = JS_GetFunctionId((JSFunction *) JS_GetPrivate(cx, funobj)); + JSString *str = JS_GetFunctionId(JS_GetObjectFunction(funobj)); jsid methodId = str ? 
INTERNED_STRING_TO_JSID(cx, str) : JSID_VOID; GetMemberInfo(JSVAL_TO_OBJECT(vp[1]), methodId, ifaceNamep); *memberIdp = methodId; diff --git a/js/xpconnect/src/XPCWrappedNative.cpp b/js/xpconnect/src/XPCWrappedNative.cpp index 237baf1e0163..520de68508a1 100644 --- a/js/xpconnect/src/XPCWrappedNative.cpp +++ b/js/xpconnect/src/XPCWrappedNative.cpp @@ -2434,8 +2434,7 @@ CallMethodHelper::GatherAndConvertResults() !GetInterfaceTypeFromParam(i, datum_type, ¶m_iid)) return false; - uintN err; - + nsresult err; if (isArray) { XPCLazyCallContext lccx(mCallContext); if (!XPCConvert::NativeArray2JS(lccx, &v, (const void**)&dp->val, diff --git a/js/xpconnect/src/XPCWrappedNativeInfo.cpp b/js/xpconnect/src/XPCWrappedNativeInfo.cpp index 0ac2a761962a..4cb76d81546d 100644 --- a/js/xpconnect/src/XPCWrappedNativeInfo.cpp +++ b/js/xpconnect/src/XPCWrappedNativeInfo.cpp @@ -55,8 +55,8 @@ XPCNativeMember::GetCallInfo(XPCCallContext& ccx, XPCNativeMember** pMember) { funobj = js::UnwrapObject(funobj); - jsval ifaceVal = js::GetReservedSlot(funobj, 0); - jsval memberVal = js::GetReservedSlot(funobj, 1); + jsval ifaceVal = js::GetFunctionNativeReserved(funobj, 0); + jsval memberVal = js::GetFunctionNativeReserved(funobj, 1); *pInterface = (XPCNativeInterface*) JSVAL_TO_PRIVATE(ifaceVal); *pMember = (XPCNativeMember*) JSVAL_TO_PRIVATE(memberVal); @@ -125,7 +125,7 @@ XPCNativeMember::Resolve(XPCCallContext& ccx, XPCNativeInterface* iface, callback = XPC_WN_GetterSetter; } - JSFunction *fun = JS_NewFunctionById(ccx, callback, argc, 0, parent, GetName()); + JSFunction *fun = js::NewFunctionByIdWithReserved(ccx, callback, argc, 0, parent, GetName()); if (!fun) return false; @@ -133,9 +133,8 @@ XPCNativeMember::Resolve(XPCCallContext& ccx, XPCNativeInterface* iface, if (!funobj) return false; - if (!JS_SetReservedSlot(ccx, funobj, 0, PRIVATE_TO_JSVAL(iface))|| - !JS_SetReservedSlot(ccx, funobj, 1, PRIVATE_TO_JSVAL(this))) - return false; + js::SetFunctionNativeReserved(funobj, 0, PRIVATE_TO_JSVAL(iface)); + js::SetFunctionNativeReserved(funobj, 1, PRIVATE_TO_JSVAL(this)); *vp = OBJECT_TO_JSVAL(funobj); diff --git a/js/xpconnect/src/dombindings.cpp b/js/xpconnect/src/dombindings.cpp index bc4caa83509d..128bffb1313f 100644 --- a/js/xpconnect/src/dombindings.cpp +++ b/js/xpconnect/src/dombindings.cpp @@ -253,19 +253,19 @@ ListBase::getListObject(JSObject *obj) } template -uint32 +js::Shape * ListBase::getProtoShape(JSObject *obj) { JS_ASSERT(objIsList(obj)); - return js::GetProxyExtra(obj, JSPROXYSLOT_PROTOSHAPE).toPrivateUint32(); + return (js::Shape *) js::GetProxyExtra(obj, JSPROXYSLOT_PROTOSHAPE).toPrivate(); } template void -ListBase::setProtoShape(JSObject *obj, uint32 shape) +ListBase::setProtoShape(JSObject *obj, js::Shape *shape) { JS_ASSERT(objIsList(obj)); - js::SetProxyExtra(obj, JSPROXYSLOT_PROTOSHAPE, PrivateUint32Value(shape)); + js::SetProxyExtra(obj, JSPROXYSLOT_PROTOSHAPE, PrivateValue(shape)); } template @@ -273,7 +273,7 @@ bool ListBase::instanceIsListObject(JSContext *cx, JSObject *obj, JSObject *callee) { if (XPCWrapper::IsSecurityWrapper(obj)) { - if (callee && js::GetObjectGlobal(obj) == js::GetObjectGlobal(callee)) { + if (callee && JS_GetGlobalForObject(cx, obj) == JS_GetGlobalForObject(cx, callee)) { obj = js::UnwrapObject(obj); } else { obj = XPCWrapper::Unwrap(cx, obj); @@ -480,7 +480,7 @@ ListBase::create(JSContext *cx, XPCWrappedNativeScope *scope, ListType *aLis return NULL; JSAutoEnterCompartment ac; - if (js::GetObjectGlobal(parent) != scope->GetGlobalJSObject()) { + if 
(js::GetGlobalForObjectCrossCompartment(parent) != scope->GetGlobalJSObject()) { if (!ac.enter(cx, parent)) return NULL; @@ -498,7 +498,7 @@ ListBase::create(JSContext *cx, XPCWrappedNativeScope *scope, ListType *aLis return NULL; NS_ADDREF(aList); - setProtoShape(obj, -1); + setProtoShape(obj, NULL); aWrapperCache->SetWrapper(obj); @@ -855,7 +855,7 @@ ListBase::shouldCacheProtoShape(JSContext *cx, JSObject *proto, bool *should if (!JS_GetPropertyDescriptorById(cx, proto, id, JSRESOLVE_QUALIFIED, &desc)) return false; if (desc.obj != proto || desc.getter || JSVAL_IS_PRIMITIVE(desc.value) || - n >= js::GetNumSlots(proto) || js::GetSlot(proto, n) != desc.value || + n >= js::GetObjectSlotSpan(proto) || js::GetObjectSlot(proto, n) != desc.value || !JS_IsNativeFunction(JSVAL_TO_OBJECT(desc.value), sProtoMethods[n].native)) { *shouldCache = false; return true; @@ -932,7 +932,7 @@ ListBase::nativeGet(JSContext *cx, JSObject *proxy, JSObject *proto, jsid id if (!vp) return true; - *vp = js::GetSlot(proto, n); + *vp = js::GetObjectSlot(proto, n); JS_ASSERT(JS_IsNativeFunction(&vp->toObject(), sProtoMethods[n].native)); return true; } diff --git a/js/xpconnect/src/dombindings.h b/js/xpconnect/src/dombindings.h index 6864fa412d16..30f855c69939 100644 --- a/js/xpconnect/src/dombindings.h +++ b/js/xpconnect/src/dombindings.h @@ -187,8 +187,8 @@ private: static JSObject *ensureExpandoObject(JSContext *cx, JSObject *obj); - static uint32 getProtoShape(JSObject *obj); - static void setProtoShape(JSObject *obj, uint32 shape); + static js::Shape *getProtoShape(JSObject *obj); + static void setProtoShape(JSObject *obj, js::Shape *shape); static JSBool length_getter(JSContext *cx, JSObject *obj, jsid id, jsval *vp); diff --git a/js/xpconnect/src/nsXPConnect.cpp b/js/xpconnect/src/nsXPConnect.cpp index 113679bcbee8..95ffdc03e7b9 100644 --- a/js/xpconnect/src/nsXPConnect.cpp +++ b/js/xpconnect/src/nsXPConnect.cpp @@ -830,11 +830,11 @@ nsXPConnect::Traverse(void *p, nsCycleCollectionTraversalCallback &cb) JSBool markJSObject = false; if (traceKind == JSTRACE_OBJECT) { obj = static_cast(p); - clazz = obj->getClass(); + clazz = js::GetObjectClass(obj); if (clazz == &XPC_WN_Tearoff_JSClass) { XPCWrappedNative *wrapper = - (XPCWrappedNative*)xpc_GetJSPrivate(obj->getParent()); + (XPCWrappedNative*)xpc_GetJSPrivate(js::GetObjectParent(obj)); dontTraverse = WrapperIsNotMainThreadOnly(wrapper); } else if (IS_WRAPPER_CLASS(clazz) && IS_WN_WRAPPER_OBJECT(obj)) { XPCWrappedNative *wrapper = (XPCWrappedNative*)xpc_GetJSPrivate(obj); @@ -901,16 +901,15 @@ nsXPConnect::Traverse(void *p, nsCycleCollectionTraversalCallback &cb) "Script", "Xml", "Shape", + "BaseShape", "TypeObject", }; JS_STATIC_ASSERT(NS_ARRAY_LENGTH(trace_types) == JSTRACE_LAST + 1); JS_snprintf(name, sizeof(name), "JS %s", trace_types[traceKind]); } - if (traceKind == JSTRACE_OBJECT) { - JSObject *global = static_cast(p), *parent; - while ((parent = global->getParent())) - global = parent; + if(traceKind == JSTRACE_OBJECT) { + JSObject *global = JS_GetGlobalForObject(NULL, static_cast(p)); char fullname[100]; JS_snprintf(fullname, sizeof(fullname), "%s (global=%p)", name, global); @@ -1302,7 +1301,7 @@ nsXPConnect::InitClassesWithNewWrappedGlobal(JSContext * aJSContext, // voodoo to fixup scoping and parenting... 
- JS_ASSERT(!globalJSObj->getParent()); + JS_ASSERT(!js::GetObjectParent(globalJSObj)); JSObject* oldGlobal = JS_GetGlobalObject(aJSContext); if (!oldGlobal || oldGlobal == tempGlobal) @@ -2078,7 +2077,7 @@ nsXPConnect::RestoreWrappedNativePrototype(JSContext * aJSContext, if (NS_FAILED(rv)) return UnexpectedFailure(rv); - if (!IS_PROTO_CLASS(protoJSObject->getClass())) + if (!IS_PROTO_CLASS(js::GetObjectClass(protoJSObject))) return UnexpectedFailure(NS_ERROR_INVALID_ARG); XPCWrappedNativeScope* scope = @@ -2729,7 +2728,7 @@ nsXPConnect::GetSafeJSContext(JSContext * *aSafeJSContext) nsIPrincipal* nsXPConnect::GetPrincipal(JSObject* obj, bool allowShortCircuit) const { - NS_ASSERTION(IS_WRAPPER_CLASS(obj->getClass()), + NS_ASSERTION(IS_WRAPPER_CLASS(js::GetObjectClass(obj)), "What kind of wrapper is this?"); if (IS_WN_WRAPPER_OBJECT(obj)) { diff --git a/js/xpconnect/src/xpcprivate.h b/js/xpconnect/src/xpcprivate.h index eb2b64c7852c..766fbc92c168 100644 --- a/js/xpconnect/src/xpcprivate.h +++ b/js/xpconnect/src/xpcprivate.h @@ -1630,7 +1630,7 @@ public: { JS_ASSERT(js::GetObjectClass(obj)->flags & JSCLASS_XPCONNECT_GLOBAL); - const js::Value &v = js::GetSlot(obj, JSCLASS_GLOBAL_SLOT_COUNT); + const js::Value &v = js::GetObjectSlot(obj, JSCLASS_GLOBAL_SLOT_COUNT); return v.isUndefined() ? nsnull : static_cast(v.toPrivate()); @@ -3327,7 +3327,14 @@ public: static JSBool JSArray2Native(XPCCallContext& ccx, void** d, jsval s, JSUint32 count, const nsXPTType& type, - const nsID* iid, uintN* pErr); + const nsID* iid, nsresult* pErr); + + static JSBool JSTypedArray2Native(XPCCallContext& ccx, + void** d, + JSObject* jsarray, + JSUint32 count, + const nsXPTType& type, + nsresult* pErr); static JSBool NativeStringWithSize2JS(JSContext* cx, jsval* d, const void* s, diff --git a/js/xpconnect/src/xpcpublic.h b/js/xpconnect/src/xpcpublic.h index 6117c220359a..2b82dcc6aa70 100644 --- a/js/xpconnect/src/xpcpublic.h +++ b/js/xpconnect/src/xpcpublic.h @@ -207,6 +207,7 @@ struct CompartmentStats PRInt64 gcHeapStrings; PRInt64 gcHeapShapesTree; PRInt64 gcHeapShapesDict; + PRInt64 gcHeapShapesBase; PRInt64 gcHeapScripts; PRInt64 gcHeapTypeObjects; PRInt64 gcHeapXML; @@ -216,6 +217,7 @@ struct CompartmentStats PRInt64 shapesExtraTreeTables; PRInt64 shapesExtraDictTables; PRInt64 shapesExtraTreeShapeKids; + PRInt64 shapesCompartmentTables; PRInt64 scriptData; #ifdef JS_METHODJIT diff --git a/js/xpconnect/tests/components/js/xpctest_params.js b/js/xpconnect/tests/components/js/xpctest_params.js index c38a1360564c..72700a8aa555 100644 --- a/js/xpconnect/tests/components/js/xpctest_params.js +++ b/js/xpconnect/tests/components/js/xpctest_params.js @@ -97,7 +97,7 @@ TestParams.prototype = { testACString: f, testJsval: f, testShortArray: f_is, - testLongLongArray: f_is, + testDoubleArray: f_is, testStringArray: f_is, testWstringArray: f_is, testInterfaceArray: f_is, diff --git a/js/xpconnect/tests/components/native/xpctest_params.cpp b/js/xpconnect/tests/components/native/xpctest_params.cpp index 103034df847e..0e9bea415eea 100644 --- a/js/xpconnect/tests/components/native/xpctest_params.cpp +++ b/js/xpconnect/tests/components/native/xpctest_params.cpp @@ -246,14 +246,14 @@ NS_IMETHODIMP nsXPCTestParams::TestShortArray(PRUint32 aLength, PRInt16 *a, BUFFER_METHOD_IMPL(PRInt16, 0, TAKE_OWNERSHIP_NOOP); } -/* void testLongLongArray (in unsigned long aLength, [array, size_is (aLength)] in long long a, - * inout unsigned long bLength, [array, size_is (bLength)] inout long long b, - * out unsigned long rvLength, [array, 
size_is (rvLength), retval] out long long rv); */ -NS_IMETHODIMP nsXPCTestParams::TestLongLongArray(PRUint32 aLength, PRInt64 *a, - PRUint32 *bLength NS_INOUTPARAM, PRInt64 **b NS_INOUTPARAM, - PRUint32 *rvLength NS_OUTPARAM, PRInt64 **rv NS_OUTPARAM) +/* void testDoubleArray (in unsigned long aLength, [array, size_is (aLength)] in double a, + * inout unsigned long bLength, [array, size_is (bLength)] inout double b, + * out unsigned long rvLength, [array, size_is (rvLength), retval] out double rv); */ +NS_IMETHODIMP nsXPCTestParams::TestDoubleArray(PRUint32 aLength, double *a, + PRUint32 *bLength NS_INOUTPARAM, double **b NS_INOUTPARAM, + PRUint32 *rvLength NS_OUTPARAM, double **rv NS_OUTPARAM) { - BUFFER_METHOD_IMPL(PRInt64, 0, TAKE_OWNERSHIP_NOOP); + BUFFER_METHOD_IMPL(double, 0, TAKE_OWNERSHIP_NOOP); } /* void testStringArray (in unsigned long aLength, [array, size_is (aLength)] in string a, diff --git a/js/xpconnect/tests/idl/xpctest_params.idl b/js/xpconnect/tests/idl/xpctest_params.idl index 5a37a2753ae0..592d312f8055 100644 --- a/js/xpconnect/tests/idl/xpctest_params.idl +++ b/js/xpconnect/tests/idl/xpctest_params.idl @@ -47,7 +47,7 @@ interface nsIXPCTestInterfaceA; interface nsIXPCTestInterfaceB; -[scriptable, uuid(b94cd289-d0df-4d25-8995-facf687d921d)] +[scriptable, uuid(fe2b7433-ac3b-49ef-9344-b67228bfdd46)] interface nsIXPCTestParams : nsISupports { // These types correspond to the ones in typelib.py @@ -79,9 +79,9 @@ interface nsIXPCTestParams : nsISupports { void testShortArray(in unsigned long aLength, [array, size_is(aLength)] in short a, inout unsigned long bLength, [array, size_is(bLength)] inout short b, out unsigned long rvLength, [retval, array, size_is(rvLength)] out short rv); - void testLongLongArray(in unsigned long aLength, [array, size_is(aLength)] in long long a, - inout unsigned long bLength, [array, size_is(bLength)] inout long long b, - out unsigned long rvLength, [retval, array, size_is(rvLength)] out long long rv); + void testDoubleArray(in unsigned long aLength, [array, size_is(aLength)] in double a, + inout unsigned long bLength, [array, size_is(bLength)] inout double b, + out unsigned long rvLength, [retval, array, size_is(rvLength)] out double rv); void testStringArray(in unsigned long aLength, [array, size_is(aLength)] in string a, inout unsigned long bLength, [array, size_is(bLength)] inout string b, out unsigned long rvLength, [retval, array, size_is(rvLength)] out string rv); diff --git a/js/xpconnect/tests/unit/test_params.js b/js/xpconnect/tests/unit/test_params.js index 4e2b08a9b00b..870e8077c99f 100644 --- a/js/xpconnect/tests/unit/test_params.js +++ b/js/xpconnect/tests/unit/test_params.js @@ -119,6 +119,22 @@ function test_component(contractid) { do_check_true(dotEqualsComparator(val1IID, bIID.value)); } + // Check that the given call (type mismatch) results in an exception being thrown. + function doTypedArrayMismatchTest(name, val1, val1Size, val2, val2Size) { + var comparator = arrayComparator(standardComparator); + var error = false; + try { + doIsTest(name, val1, val1Size, val2, val2Size, comparator); + + // An exception was not thrown as would have been expected. + do_check_true(false); + } + catch (e) { + // An exception was thrown as expected. + do_check_true(true); + } + } + // Workaround for bug 687612 (inout parameters broken for dipper types). // We do a simple test of copying a into b, and ignore the rv. function doTestWorkaround(name, val1) { @@ -165,7 +181,8 @@ function test_component(contractid) { // Test arrays. 
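Above, the test component and its IDL swap the long long array case for a double array (testLongLongArray becomes testDoubleArray), and the JS implementation in xpctest_params.js is renamed to match. A plausible reason, inferred here rather than stated by the patch, is that JS numbers are IEEE-754 doubles, so 64-bit integer arrays cannot round-trip exactly; the short sketch below (not part of the patch) illustrates the limit that doubles avoid:

```js
// Every JS number is a double, so 64-bit integers beyond 2^53 lose precision.
var big = 9007199254740993;                 // 2^53 + 1
do_check_true(big === 9007199254740992);    // parses/rounds to 2^53
// Doubles survive the array conversion unchanged, which is what testDoubleArray
// (compared with fuzzComparator) now exercises instead.
```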
doIsTest("testShortArray", [2, 4, 6], 3, [1, 3, 5, 7], 4, arrayComparator(standardComparator)); - doIsTest("testLongLongArray", [-10000000000], 1, [1, 3, 1234511234551], 3, arrayComparator(standardComparator)); + doIsTest("testDoubleArray", [-10, -0.5], 2, [1, 3, 1e11, -8e-5 ], 4, arrayComparator(fuzzComparator)); + doIsTest("testStringArray", ["mary", "hat", "hey", "lid", "tell", "lam"], 6, ["ids", "fleas", "woes", "wide", "has", "know", "!"], 7, arrayComparator(standardComparator)); doIsTest("testWstringArray", ["沒有語言", "的偉大嗎?]"], 2, @@ -173,6 +190,13 @@ function test_component(contractid) { doIsTest("testInterfaceArray", [makeA(), makeA()], 2, [makeA(), makeA(), makeA(), makeA(), makeA(), makeA()], 6, arrayComparator(interfaceComparator)); + // Test typed arrays and ArrayBuffer aliasing. + var arrayBuffer = new ArrayBuffer(16); + var int16Array = new Int16Array(arrayBuffer, 2, 3); + int16Array.set([-32768, 0, 32767]); + doIsTest("testShortArray", int16Array, 3, new Int16Array([1773, -32768, 32767, 7]), 4, arrayComparator(standardComparator)); + doIsTest("testDoubleArray", new Float64Array([-10, -0.5]), 2, new Float64Array([0, 3.2, 1.0e10, -8.33 ]), 4, arrayComparator(fuzzComparator)); + // Test sized strings. var ssTests = ["Tis not possible, I muttered", "give me back my free hardcore!", "quoth the server:", "4〠4"]; doIsTest("testSizedString", ssTests[0], ssTests[0].length, ssTests[1], ssTests[1].length, standardComparator); @@ -186,4 +210,12 @@ function test_component(contractid) { // Test arrays of iids. doIs2Test("testInterfaceIsArray", [makeA(), makeA(), makeA(), makeA(), makeA()], 5, Ci['nsIXPCTestInterfaceA'], [makeB(), makeB(), makeB()], 3, Ci['nsIXPCTestInterfaceB']); + + // Test incorrect (too big) array size parameter; this should throw NOT_ENOUGH_ELEMENTS. + doTypedArrayMismatchTest("testShortArray", Int16Array([-3, 7, 4]), 4, + Int16Array([1, -32, 6]), 3); + + // Test type mismatch (int16 <-> uint16); this should throw BAD_CONVERT_JS. + doTypedArrayMismatchTest("testShortArray", Uint16Array([0, 7, 4, 3]), 4, + Uint16Array([1, 5, 6]), 3); } diff --git a/js/xpconnect/wrappers/XrayWrapper.cpp b/js/xpconnect/wrappers/XrayWrapper.cpp index 48b80f7790d4..32f7a2c3e2d7 100644 --- a/js/xpconnect/wrappers/XrayWrapper.cpp +++ b/js/xpconnect/wrappers/XrayWrapper.cpp @@ -1089,7 +1089,8 @@ GetHolderObject(JSContext *cx, JSObject *wrapper, bool createHolder = true) if (!js::GetProxyExtra(wrapper, 0).isUndefined()) return &js::GetProxyExtra(wrapper, 0).toObject(); - JSObject *obj = JS_NewObjectWithGivenProto(cx, nsnull, nsnull, js::GetObjectGlobal(wrapper)); + JSObject *obj = JS_NewObjectWithGivenProto(cx, nsnull, nsnull, + JS_GetGlobalForObject(cx, wrapper)); if (!obj) return nsnull; js::SetProxyExtra(wrapper, 0, ObjectValue(*obj)); diff --git a/layout/base/nsCSSRendering.cpp b/layout/base/nsCSSRendering.cpp index 64c09f189f3b..0dc431ae6b55 100644 --- a/layout/base/nsCSSRendering.cpp +++ b/layout/base/nsCSSRendering.cpp @@ -3836,9 +3836,8 @@ ImageRenderer::PrepareImage() // If the referenced element doesn't have a frame we might still be able // to paint it if it's an , , or