This commit is contained in:
Ryan VanderMeulen 2015-02-02 17:28:12 -05:00
Родитель 0ace9ac204 e59559ca29
Коммит 769dd38513
184 изменённых файлов: 3664 добавлений и 3111 удалений

Просмотреть файл

@ -81,6 +81,10 @@ automation/upload: automation/update-packaging
# binaries/libs, and that's what we package/test.
automation/pretty-package: automation/buildsymbols
# The installer and packager both run stage-package, and may conflict
# with each other.
automation/installer: automation/package
# The 'pretty' versions of targets run before the regular ones to avoid
# conflicts in writing to the same files.
automation/installer: automation/pretty-installer

Просмотреть файл

@ -46,7 +46,7 @@ leak:mozilla::TransportLayerDtls::Setup
###
# Bug 981195 - Small leak in the parser. m4
leak:TypeCompartment::fixObjectType
leak:TypeCompartment::fixObjectGroup
# Bug 982111 - WebM is leaking. m1
leak:nestegg_read_packet

Просмотреть файл

@ -6,7 +6,9 @@
#include "mozilla/dom/Console.h"
#include "mozilla/dom/ConsoleBinding.h"
#include "mozilla/dom/BlobBinding.h"
#include "mozilla/dom/Exceptions.h"
#include "mozilla/dom/File.h"
#include "mozilla/dom/ToJSValue.h"
#include "mozilla/Maybe.h"
#include "nsCycleCollectionParticipant.h"
@ -41,9 +43,10 @@
// console.trace().
#define DEFAULT_MAX_STACKTRACE_DEPTH 200
// This tag is used in the Structured Clone Algorithm to move js values from
// These tags are used in the Structured Clone Algorithm to move js values from
// worker thread to main thread
#define CONSOLE_TAG JS_SCTAG_USER_MIN
#define CONSOLE_TAG_STRING JS_SCTAG_USER_MIN
#define CONSOLE_TAG_BLOB JS_SCTAG_USER_MIN + 1
using namespace mozilla::dom::exceptions;
using namespace mozilla::dom::workers;
@ -51,6 +54,14 @@ using namespace mozilla::dom::workers;
namespace mozilla {
namespace dom {
// Out-of-band payload shared between the console structured-clone write
// callback (worker side) and read callback (main-thread side).  Indices
// written with CONSOLE_TAG_STRING / CONSOLE_TAG_BLOB refer into mStrings /
// mFiles respectively.
struct
ConsoleStructuredCloneData
{
// Parent for File objects recreated on the main thread; set to the inner
// window just before read() and cleared to nullptr right after.
nsCOMPtr<nsISupports> mParent;
// Stringified console arguments collected by the write callback.
nsTArray<nsString> mStrings;
// Backing impls of Blob/File arguments that can be cloned across threads.
nsTArray<nsRefPtr<FileImpl>> mFiles;
};
/**
* Console API in workers uses the Structured Clone Algorithm to move any value
* from the worker thread to the main-thread. Some object cannot be moved and,
@ -63,29 +74,47 @@ namespace dom {
static JSObject*
ConsoleStructuredCloneCallbacksRead(JSContext* aCx,
JSStructuredCloneReader* /* unused */,
uint32_t aTag, uint32_t aData,
uint32_t aTag, uint32_t aIndex,
void* aClosure)
{
AssertIsOnMainThread();
ConsoleStructuredCloneData* data =
static_cast<ConsoleStructuredCloneData*>(aClosure);
MOZ_ASSERT(data);
if (aTag != CONSOLE_TAG) {
return nullptr;
if (aTag == CONSOLE_TAG_STRING) {
MOZ_ASSERT(data->mStrings.Length() > aIndex);
JS::Rooted<JS::Value> value(aCx);
if (!xpc::StringToJsval(aCx, data->mStrings.ElementAt(aIndex), &value)) {
return nullptr;
}
JS::Rooted<JSObject*> obj(aCx);
if (!JS_ValueToObject(aCx, value, &obj)) {
return nullptr;
}
return obj;
}
nsTArray<nsString>* strings = static_cast<nsTArray<nsString>*>(aClosure);
MOZ_ASSERT(strings->Length() > aData);
if (aTag == CONSOLE_TAG_BLOB) {
MOZ_ASSERT(data->mFiles.Length() > aIndex);
JS::Rooted<JS::Value> value(aCx);
if (!xpc::StringToJsval(aCx, strings->ElementAt(aData), &value)) {
return nullptr;
JS::Rooted<JS::Value> val(aCx);
{
nsRefPtr<File> file =
new File(data->mParent, data->mFiles.ElementAt(aIndex));
if (!GetOrCreateDOMReflector(aCx, file, &val)) {
return nullptr;
}
}
return &val.toObject();
}
JS::Rooted<JSObject*> obj(aCx);
if (!JS_ValueToObject(aCx, value, &obj)) {
return nullptr;
}
return obj;
MOZ_CRASH("No other tags are supported.");
return nullptr;
}
// This method is called by the Structured Clone Algorithm when some data has
@ -96,6 +125,21 @@ ConsoleStructuredCloneCallbacksWrite(JSContext* aCx,
JS::Handle<JSObject*> aObj,
void* aClosure)
{
ConsoleStructuredCloneData* data =
static_cast<ConsoleStructuredCloneData*>(aClosure);
MOZ_ASSERT(data);
nsRefPtr<File> file;
if (NS_SUCCEEDED(UNWRAP_OBJECT(Blob, aObj, file)) &&
file->Impl()->MayBeClonedToOtherThreads()) {
if (!JS_WriteUint32Pair(aWriter, CONSOLE_TAG_BLOB, data->mFiles.Length())) {
return false;
}
data->mFiles.AppendElement(file->Impl());
return true;
}
JS::Rooted<JS::Value> value(aCx, JS::ObjectOrNullValue(aObj));
JS::Rooted<JSString*> jsString(aCx, JS::ToString(aCx, value));
if (!jsString) {
@ -107,14 +151,12 @@ ConsoleStructuredCloneCallbacksWrite(JSContext* aCx,
return false;
}
nsTArray<nsString>* strings = static_cast<nsTArray<nsString>*>(aClosure);
if (!JS_WriteUint32Pair(aWriter, CONSOLE_TAG, strings->Length())) {
if (!JS_WriteUint32Pair(aWriter, CONSOLE_TAG_STRING,
data->mStrings.Length())) {
return false;
}
strings->AppendElement(string);
data->mStrings.AppendElement(string);
return true;
}
@ -414,7 +456,7 @@ private:
JS::Rooted<JS::Value> value(aCx, JS::ObjectValue(*arguments));
if (!mArguments.write(aCx, value, &gConsoleCallbacks, &mStrings)) {
if (!mArguments.write(aCx, value, &gConsoleCallbacks, &mData)) {
return false;
}
@ -451,8 +493,13 @@ private:
mCallData->SetIDs(id, frame.mFilename);
}
// Now we could have the correct window (if we are not window-less).
mData.mParent = aInnerWindow;
ProcessCallData(aCx);
mCallData->CleanupJSObjects();
mData.mParent = nullptr;
}
private:
@ -462,7 +509,7 @@ private:
ClearException ce(aCx);
JS::Rooted<JS::Value> argumentsValue(aCx);
if (!mArguments.read(aCx, &argumentsValue, &gConsoleCallbacks, &mStrings)) {
if (!mArguments.read(aCx, &argumentsValue, &gConsoleCallbacks, &mData)) {
return;
}
@ -494,7 +541,7 @@ private:
ConsoleCallData* mCallData;
JSAutoStructuredCloneBuffer mArguments;
nsTArray<nsString> mStrings;
ConsoleStructuredCloneData mData;
};
// This runnable calls ProfileMethod() on the console on the main-thread.
@ -539,7 +586,7 @@ private:
JS::Rooted<JS::Value> value(aCx, JS::ObjectValue(*arguments));
if (!mBuffer.write(aCx, value, &gConsoleCallbacks, &mStrings)) {
if (!mBuffer.write(aCx, value, &gConsoleCallbacks, &mData)) {
return false;
}
@ -552,8 +599,14 @@ private:
{
ClearException ce(aCx);
// Now we could have the correct window (if we are not window-less).
mData.mParent = aInnerWindow;
JS::Rooted<JS::Value> argumentsValue(aCx);
if (!mBuffer.read(aCx, &argumentsValue, &gConsoleCallbacks, &mStrings)) {
bool ok = mBuffer.read(aCx, &argumentsValue, &gConsoleCallbacks, &mData);
mData.mParent = nullptr;
if (!ok) {
return;
}
@ -585,7 +638,7 @@ private:
Sequence<JS::Value> mArguments;
JSAutoStructuredCloneBuffer mBuffer;
nsTArray<nsString> mStrings;
ConsoleStructuredCloneData mData;
};
NS_IMPL_CYCLE_COLLECTION_CLASS(Console)

Просмотреть файл

@ -4413,7 +4413,7 @@ nsGlobalWindow::GetApplicationCache(nsIDOMOfflineResourceList **aApplicationCach
return rv.ErrorCode();
}
nsIDOMCrypto*
Crypto*
nsGlobalWindow::GetCrypto(ErrorResult& aError)
{
FORWARD_TO_INNER_OR_THROW(GetCrypto, (aError), aError, nullptr);

Просмотреть файл

@ -74,7 +74,6 @@ class nsIBaseWindow;
class nsIContent;
class nsICSSDeclaration;
class nsIDocShellTreeOwner;
class nsIDOMCrypto;
class nsIDOMOfflineResourceList;
class nsIScrollableFrame;
class nsIControllers;
@ -101,6 +100,7 @@ class DOMEventTargetHelper;
namespace dom {
class BarProp;
class Console;
class Crypto;
class External;
class Function;
class Gamepad;
@ -995,7 +995,7 @@ public:
}
int64_t GetMozAnimationStartTime(mozilla::ErrorResult& aError);
void SizeToContent(mozilla::ErrorResult& aError);
nsIDOMCrypto* GetCrypto(mozilla::ErrorResult& aError);
mozilla::dom::Crypto* GetCrypto(mozilla::ErrorResult& aError);
nsIControllers* GetControllers(mozilla::ErrorResult& aError);
mozilla::dom::Element* GetRealFrameElement(mozilla::ErrorResult& aError);
float GetMozInnerScreenX(mozilla::ErrorResult& aError);
@ -1556,7 +1556,7 @@ protected:
nsString mStatus;
nsString mDefaultStatus;
nsGlobalWindowObserver* mObserver; // Inner windows only.
nsCOMPtr<nsIDOMCrypto> mCrypto;
nsRefPtr<mozilla::dom::Crypto> mCrypto;
nsRefPtr<mozilla::dom::Console> mConsole;
// We need to store an nsISupports pointer to this object because the
// mozilla::dom::External class doesn't exist on b2g and using the type

Просмотреть файл

@ -2354,6 +2354,13 @@ SetMemoryGCSliceTimePrefChangedCallback(const char* aPrefName, void* aClosure)
JS_SetGCParameter(sRuntime, JSGC_SLICE_TIME_BUDGET, pref);
}
// Pref-change callback: mirrors the boolean pref (registered below as
// "javascript.options.mem.gc_compacting") into the main JS runtime's
// JSGC_COMPACTING_ENABLED GC parameter.  aClosure is unused.
static void
SetMemoryGCCompactingPrefChangedCallback(const char* aPrefName, void* aClosure)
{
// NOTE(review): relies on Preferences::GetBool's default for an unset
// pref — confirm the intended default for compacting GC.
bool pref = Preferences::GetBool(aPrefName);
JS_SetGCParameter(sRuntime, JSGC_COMPACTING_ENABLED, pref);
}
static void
SetMemoryGCPrefChangedCallback(const char* aPrefName, void* aClosure)
{
@ -2616,6 +2623,9 @@ nsJSContext::EnsureStatics()
Preferences::RegisterCallbackAndCall(SetMemoryGCSliceTimePrefChangedCallback,
"javascript.options.mem.gc_incremental_slice_ms");
Preferences::RegisterCallbackAndCall(SetMemoryGCCompactingPrefChangedCallback,
"javascript.options.mem.gc_compacting");
Preferences::RegisterCallbackAndCall(SetMemoryGCPrefChangedCallback,
"javascript.options.mem.gc_high_frequency_time_limit_ms",
(void *)JSGC_HIGH_FREQUENCY_TIME_LIMIT);

Просмотреть файл

@ -5489,6 +5489,12 @@ EventStateManager::WheelPrefs::NeedToComputeLineOrPageDelta(
(mMultiplierY[index] != 1.0 && mMultiplierY[index] != -1.0);
}
// Returns true if, per the user's wheel prefs, aEvent resolves to the
// plain-scroll action (WheelPrefs::ACTION_SCROLL) rather than some other
// configured action.  Used by APZ to decide whether to handle the event.
bool
EventStateManager::WheelEventIsScrollAction(WidgetWheelEvent* aEvent)
{
// Ask the WheelPrefs singleton how this specific event is configured.
return WheelPrefs::GetInstance()->ComputeActionFor(aEvent) == WheelPrefs::ACTION_SCROLL;
}
bool
EventStateManager::WheelPrefs::IsOverOnePageScrollAllowedX(
WidgetWheelEvent* aEvent)

Просмотреть файл

@ -224,6 +224,9 @@ public:
static LayoutDeviceIntPoint GetChildProcessOffset(nsFrameLoader* aFrameLoader,
const WidgetEvent& aEvent);
// Returns true if the given WidgetWheelEvent will resolve to a scroll action.
static bool WheelEventIsScrollAction(WidgetWheelEvent* aEvent);
// Holds the point in screen coords that a mouse event was dispatched to,
// before we went into pointer lock mode. This is constantly updated while
// the pointer is not locked, but we don't update it while the pointer is

Просмотреть файл

@ -3016,6 +3016,12 @@ void HTMLMediaElement::MetadataLoaded(const MediaInfo* aInfo,
mTags = aTags.forget();
mLoadedDataFired = false;
ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_METADATA);
if (mIsEncrypted) {
nsCOMPtr<nsIObserverService> obs = services::GetObserverService();
obs->NotifyObservers(static_cast<nsIContent*>(this), "media-eme-metadataloaded", nullptr);
}
DispatchAsyncEvent(NS_LITERAL_STRING("durationchange"));
if (IsVideo() && mHasVideo) {
mMediaSize = aInfo->mVideo.mDisplay;

Просмотреть файл

@ -368,6 +368,8 @@ skip-if = buildapp == 'b2g' && toolkit != 'gonk' # bug 1082984
skip-if = buildapp == 'b2g' || toolkit == 'android' || e10s # bug 1043403, bug 1057908
[test_eme_canvas_blocked.html]
skip-if = buildapp == 'b2g' || toolkit == 'android' || e10s # bug 1043403, bug 1057908
[test_eme_obs_notification.html]
skip-if = buildapp == 'b2g' || toolkit == 'android' || e10s # bug 1043403, bug 1057908
[test_eme_persistent_sessions.html]
skip-if = buildapp == 'b2g' || toolkit == 'android' || e10s # bug 1043403, bug 1057908
[test_eme_playback.html]

Просмотреть файл

@ -0,0 +1,81 @@
<!DOCTYPE HTML>
<html>
<head>
<title>Test Encrypted Media Extensions</title>
<script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
<script type="text/javascript" src="manifest.js"></script>
<script type="text/javascript" src="eme.js"></script>
</head>
<body>
<pre id="test">
<script class="testbody" type="text/javascript">
var manager = new MediaTestManager;
var videos = new Set();
var observedVideos = new Set();
var observer = function(subject) {
ok(videos.has(subject), "Video should be known to us");
videos.delete(subject);
observedVideos.add(subject);
};
SpecialPowers.Services.obs.addObserver(observer, "media-eme-metadataloaded", false);
// When the test manager finishes, these sets should all be empty again:
manager.onFinished = function() {
is(videos.size, 0, "video set should be empty");
is(observedVideos.size, 0, "observed video set should be empty");
};
// ... but even if they're not, we should clear them out when we finish:
SimpleTest.registerCleanupFunction(function() {
SpecialPowers.Services.obs.removeObserver(observer, "media-eme-metadataloaded");
videos.clear();
observedVideos.clear();
});
function startTest(test, token)
{
manager.started(token);
var sessions = [];
var v = SetupEME(test, token);
videos.add(v);
v.preload = "auto"; // Required due to "canplay" not firing for MSE unless we do this.
v.addEventListener("loadeddata", function(ev) {
ok(observedVideos.has(ev.target), "Should have been told about eme video through observer as well.");
observedVideos.delete(ev.target);
manager.finished(token);
});
LoadTest(test, v, token);
}
function beginTest() {
manager.runTests(gEMETests, startTest);
}
var prefs = [
[ "media.mediasource.enabled", true ],
[ "media.mediasource.youtubeonly", false ],
[ "media.mediasource.mp4.enabled", true ],
];
if (/Linux/.test(navigator.userAgent) ||
!document.createElement('video').canPlayType("video/mp4")) {
// XXX remove once we have mp4 PlatformDecoderModules on all platforms.
prefs.push([ "media.fragmented-mp4.exposed", true ]);
prefs.push([ "media.fragmented-mp4.use-blank-decoder", true ]);
}
SimpleTest.waitForExplicitFinish();
SpecialPowers.pushPrefEnv({ "set" : prefs }, beginTest);
</script>
</pre>
</body>
</html>

Просмотреть файл

@ -29,8 +29,10 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=1022913
var onSecondSeekComplete = function(event) {
var v = event.target;
v.removeEventListener("seeked", onSecondSeekComplete);
ok(v.currentTime >= v.firstSeekTarget, v.name + " seek never go backwards. time=" + v.currentTime + " firstSeekTarget=" + v.firstSeekTarget + " secondSeekTarget=" + v.secondSeekTarget);
manager.finished(v.token);
removeNodeAndSource(v);
};
var onFirstSeekComplete = function(event) {
@ -47,6 +49,7 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=1022913
var onLoadedMetadata = function(event) {
// Seek to the mid-point between the start and the first keyframe.
var v = event.target;
v.removeEventListener("loadedmetadata", onLoadedMetadata);
v.addEventListener("seeked", onFirstSeekComplete);
v.firstSeekTarget = v.keyframes[1] * 0.5;
v.currentTime = v.firstSeekTarget;

Просмотреть файл

@ -39,6 +39,19 @@ function playElement(e) {
function loadedAll(elementList) {
elements = elementList;
// Log events for debugging.
var events = ["suspend", "play", "canplay", "canplaythrough", "loadstart", "loadedmetadata",
"loadeddata", "playing", "ended", "error", "stalled", "emptied", "abort",
"waiting", "pause"];
function logEvent(e) {
info(e.target._name + ": got " + e.type);
}
elementList.forEach(function(element) {
events.forEach(function(evt) {
element.addEventListener(evt, logEvent, false);
});
});
// Blow away the subframe
document.body.removeChild(document.getElementById("frame"));

Просмотреть файл

@ -4,18 +4,19 @@
* You can obtain one at http://mozilla.org/MPL/2.0/.
*
* The origin of this IDL file is
* http://www.w3.org/TR/WebCryptoAPI/
* https://dvcs.w3.org/hg/webcrypto-api/raw-file/tip/spec/Overview.html#crypto-interface
*/
[NoInterfaceObject]
interface RandomSource {
[Throws]
ArrayBufferView getRandomValues(ArrayBufferView array);
interface GlobalCrypto {
[Throws] readonly attribute Crypto crypto;
};
Crypto implements RandomSource;
//[Exposed=(Window,Worker)]
interface Crypto {
[Pref="dom.webcrypto.enabled"]
readonly attribute SubtleCrypto subtle;
[Throws]
ArrayBufferView getRandomValues(ArrayBufferView array);
};

Просмотреть файл

@ -230,10 +230,7 @@ partial interface Window {
};
// https://dvcs.w3.org/hg/webcrypto-api/raw-file/tip/spec/Overview.html
partial interface Window {
//[Throws] readonly attribute Crypto crypto;
[Throws] readonly attribute nsIDOMCrypto crypto;
};
Window implements GlobalCrypto;
#ifdef MOZ_WEBSPEECH
// http://dvcs.w3.org/hg/speech-api/raw-file/tip/speechapi.html

Просмотреть файл

@ -9,7 +9,6 @@ GENERATED_WEBIDL_FILES = [
]
PREPROCESSED_WEBIDL_FILES = [
'Crypto.webidl',
'HTMLMediaElement.webidl',
'Navigator.webidl',
'Window.webidl',
@ -81,6 +80,7 @@ WEBIDL_FILES = [
'ContainerBoxObject.webidl',
'ConvolverNode.webidl',
'Coordinates.webidl',
'Crypto.webidl',
'CSPReport.webidl',
'CSS.webidl',
'CSSPrimitiveValue.webidl',

Просмотреть файл

@ -400,8 +400,8 @@ UpdateCommonJSGCMemoryOption(RuntimeService* aRuntimeService,
}
void
UpdatOtherJSGCMemoryOption(RuntimeService* aRuntimeService,
JSGCParamKey aKey, uint32_t aValue)
UpdateOtherJSGCMemoryOption(RuntimeService* aRuntimeService,
JSGCParamKey aKey, uint32_t aValue)
{
AssertIsOnMainThread();
@ -466,14 +466,14 @@ LoadJSGCMemoryOptions(const char* aPrefName, void* /* aClosure */)
uint32_t value = (prefValue <= 0 || prefValue >= 0x1000) ?
uint32_t(-1) :
uint32_t(prefValue) * 1024 * 1024;
UpdatOtherJSGCMemoryOption(rts, JSGC_MAX_BYTES, value);
UpdateOtherJSGCMemoryOption(rts, JSGC_MAX_BYTES, value);
continue;
}
matchName.RebindLiteral(PREF_MEM_OPTIONS_PREFIX "high_water_mark");
if (memPrefName == matchName || (gRuntimeServiceDuringInit && index == 1)) {
int32_t prefValue = GetWorkerPref(matchName, 128);
UpdatOtherJSGCMemoryOption(rts, JSGC_MAX_MALLOC_BYTES,
UpdateOtherJSGCMemoryOption(rts, JSGC_MAX_MALLOC_BYTES,
uint32_t(prefValue) * 1024 * 1024);
continue;
}
@ -538,7 +538,7 @@ LoadJSGCMemoryOptions(const char* aPrefName, void* /* aClosure */)
int32_t prefValue = GetWorkerPref(matchName, -1);
uint32_t value =
(prefValue <= 0 || prefValue >= 100000) ? 0 : uint32_t(prefValue);
UpdatOtherJSGCMemoryOption(rts, JSGC_SLICE_TIME_BUDGET, value);
UpdateOtherJSGCMemoryOption(rts, JSGC_SLICE_TIME_BUDGET, value);
continue;
}
@ -546,7 +546,7 @@ LoadJSGCMemoryOptions(const char* aPrefName, void* /* aClosure */)
if (memPrefName == matchName ||
(gRuntimeServiceDuringInit && index == 10)) {
bool prefValue = GetWorkerPref(matchName, false);
UpdatOtherJSGCMemoryOption(rts, JSGC_DYNAMIC_HEAP_GROWTH,
UpdateOtherJSGCMemoryOption(rts, JSGC_DYNAMIC_HEAP_GROWTH,
prefValue ? 0 : 1);
continue;
}
@ -555,7 +555,7 @@ LoadJSGCMemoryOptions(const char* aPrefName, void* /* aClosure */)
if (memPrefName == matchName ||
(gRuntimeServiceDuringInit && index == 11)) {
bool prefValue = GetWorkerPref(matchName, false);
UpdatOtherJSGCMemoryOption(rts, JSGC_DYNAMIC_MARK_SLICE,
UpdateOtherJSGCMemoryOption(rts, JSGC_DYNAMIC_MARK_SLICE,
prefValue ? 0 : 1);
continue;
}
@ -574,6 +574,15 @@ LoadJSGCMemoryOptions(const char* aPrefName, void* /* aClosure */)
continue;
}
matchName.RebindLiteral(PREF_MEM_OPTIONS_PREFIX "gc_compacting");
if (memPrefName == matchName ||
(gRuntimeServiceDuringInit && index == 14)) {
bool prefValue = GetWorkerPref(matchName, false);
UpdateOtherJSGCMemoryOption(rts, JSGC_COMPACTING_ENABLED,
prefValue ? 0 : 1);
continue;
}
#ifdef DEBUG
nsAutoCString message("Workers don't support the 'mem.");
message.Append(memPrefName);

Просмотреть файл

@ -97,6 +97,7 @@ support-files =
bug1062920_worker.js
webSocket_sharedWorker.js
bug1104064_worker.js
worker_consoleAndBlobs.js
[test_404.html]
[test_atob.html]
@ -197,3 +198,4 @@ skip-if = buildapp == 'b2g' || toolkit == 'android' || e10s #bug 982828
skip-if = buildapp == 'b2g' || toolkit == 'android' || e10s #bug 982828
[test_websocket_pref.html]
[test_bug1104064.html]
[test_consoleAndBlobs.html]

Просмотреть файл

@ -0,0 +1,41 @@
<!--
Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/
-->
<!DOCTYPE HTML>
<html>
<head>
<title>Test for console API and blobs</title>
<script src="/tests/SimpleTest/SimpleTest.js">
</script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css">
</head>
<body>
<script type="text/javascript">
function consoleListener() {
SpecialPowers.addObserver(this, "console-api-log-event", false);
}
var order = 0;
consoleListener.prototype = {
observe: function(aSubject, aTopic, aData) {
ok(true, "Something has been received");
is(aTopic, "console-api-log-event");
SpecialPowers.removeObserver(this, "console-api-log-event");
var obj = aSubject.wrappedJSObject;
is(obj.arguments[0].size, 3, "The size is correct");
is(obj.arguments[0].type, 'foo/bar', "The type is correct");
SimpleTest.finish();
}
}
var cl = new consoleListener();
new Worker('worker_consoleAndBlobs.js');
SimpleTest.waitForExplicitFinish();
</script>
</body>
</html>

Просмотреть файл

@ -0,0 +1,8 @@
/**
* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/
*/
"use strict";
var b = new Blob(['123'], { type: 'foo/bar'});
console.log(b);

Просмотреть файл

@ -19,6 +19,7 @@
#include "mozilla/mozalloc.h" // for operator new
#include "mozilla/TouchEvents.h"
#include "mozilla/Preferences.h" // for Preferences
#include "mozilla/EventStateManager.h" // for WheelPrefs
#include "nsDebug.h" // for NS_WARNING
#include "nsPoint.h" // for nsIntPoint
#include "nsThreadUtils.h" // for NS_IsMainThread
@ -891,11 +892,11 @@ APZCTreeManager::ReceiveInputEvent(WidgetInputEvent& aEvent,
}
case eWheelEventClass: {
WidgetWheelEvent& wheelEvent = *aEvent.AsWheelEvent();
if (wheelEvent.IsControl() ||
wheelEvent.deltaMode != nsIDOMWheelEvent::DOM_DELTA_LINE)
if (wheelEvent.deltaMode != nsIDOMWheelEvent::DOM_DELTA_LINE ||
!EventStateManager::WheelEventIsScrollAction(&wheelEvent))
{
// Don't send through APZ if we could be ctrl+zooming or if the delta
// mode is not line-based.
// Don't send through APZ if we're not scrolling or if the delta mode
// is not line-based.
return ProcessEvent(aEvent, aOutTargetGuid, aOutInputBlockId);
}
return ProcessWheelEvent(wheelEvent, aOutTargetGuid, aOutInputBlockId);

Просмотреть файл

@ -1406,6 +1406,16 @@ public:
return mFontEntry->HasGraphiteTables();
}
// Whether this is a font that may be doing full-color rendering,
// and therefore needs us to use a mask for text-shadow even when
// we're not actually blurring.  True when the font entry exposes color
// glyphs or SVG glyph data, or carries an embedded color-bitmap table
// ('CBDT' or 'sbix').
bool AlwaysNeedsMaskForShadow() {
return mFontEntry->TryGetColorGlyphs() ||
mFontEntry->TryGetSVGData(this) ||
mFontEntry->HasFontTable(TRUETYPE_TAG('C','B','D','T')) ||
mFontEntry->HasFontTable(TRUETYPE_TAG('s','b','i','x'));
}
// whether a feature is supported by the font (limited to a small set
// of features for which some form of fallback needs to be implemented)
bool SupportsFeature(int32_t aScript, uint32_t aFeatureTag);

Просмотреть файл

@ -302,21 +302,6 @@ DisableIncrementalGC(JSRuntime *rt);
extern JS_PUBLIC_API(bool)
IsIncrementalGCEnabled(JSRuntime *rt);
/*
* Compacting GC defaults to enabled, but may be disabled for testing or in
* embeddings that have not implemented the necessary object moved hooks or weak
* pointer callbacks. There is not currently a way to re-enable compacting GC
* once it has been disabled on the runtime.
*/
extern JS_PUBLIC_API(void)
DisableCompactingGC(JSRuntime *rt);
/*
* Returns true if compacting GC is enabled.
*/
extern JS_PUBLIC_API(bool)
IsCompactingGCEnabled(JSRuntime *rt);
/*
* Returns true while an incremental GC is ongoing, both when actively
* collecting and between slices.

Просмотреть файл

@ -253,10 +253,13 @@ class HashMap
rekeyAs(old_key, new_key, new_key);
}
// Infallibly rekey one entry, if present.
void rekeyAs(const Lookup &old_lookup, const Lookup &new_lookup, const Key &new_key) {
if (Ptr p = lookup(old_lookup))
// Infallibly rekey one entry if present, and return whether that happened.
bool rekeyAs(const Lookup &old_lookup, const Lookup &new_lookup, const Key &new_key) {
if (Ptr p = lookup(old_lookup)) {
impl.rekeyAndMaybeRehash(p, new_lookup, new_key);
return true;
}
return false;
}
// HashMap is movable
@ -471,10 +474,13 @@ class HashSet
rekeyAs(old_value, new_value, new_value);
}
// Infallibly rekey one entry, if present.
void rekeyAs(const Lookup &old_lookup, const Lookup &new_lookup, const T &new_value) {
if (Ptr p = lookup(old_lookup))
// Infallibly rekey one entry if present, and return whether that happened.
bool rekeyAs(const Lookup &old_lookup, const Lookup &new_lookup, const T &new_value) {
if (Ptr p = lookup(old_lookup)) {
impl.rekeyAndMaybeRehash(p, new_lookup, new_value);
return true;
}
return false;
}
// Infallibly rekey one entry with a new key that is equivalent.

Просмотреть файл

@ -444,8 +444,8 @@ struct ZoneStats
macro(Other, IsLiveGCThing, lazyScriptsGCHeap) \
macro(Other, NotLiveGCThing, lazyScriptsMallocHeap) \
macro(Other, IsLiveGCThing, jitCodesGCHeap) \
macro(Other, IsLiveGCThing, typeObjectsGCHeap) \
macro(Other, NotLiveGCThing, typeObjectsMallocHeap) \
macro(Other, IsLiveGCThing, objectGroupsGCHeap) \
macro(Other, NotLiveGCThing, objectGroupsMallocHeap) \
macro(Other, NotLiveGCThing, typePool) \
macro(Other, NotLiveGCThing, baselineStubsOptimized) \

Просмотреть файл

@ -51,9 +51,9 @@ enum JSGCTraceKind
JSTRACE_BASE_SHAPE = 0x0F,
JSTRACE_JITCODE = 0x1F,
JSTRACE_LAZY_SCRIPT = 0x2F,
JSTRACE_TYPE_OBJECT = 0x3F,
JSTRACE_OBJECT_GROUP = 0x3F,
JSTRACE_LAST = JSTRACE_TYPE_OBJECT
JSTRACE_LAST = JSTRACE_OBJECT_GROUP
};
namespace JS {

Просмотреть файл

@ -623,7 +623,8 @@ obj_isPrototypeOf(JSContext *cx, unsigned argc, Value *vp)
}
PlainObject *
js::ObjectCreateImpl(JSContext *cx, HandleObject proto, NewObjectKind newKind, HandleTypeObject type)
js::ObjectCreateImpl(JSContext *cx, HandleObject proto, NewObjectKind newKind,
HandleObjectGroup group)
{
// Give the new object a small number of fixed slots, like we do for empty
// object literals ({}).
@ -631,20 +632,20 @@ js::ObjectCreateImpl(JSContext *cx, HandleObject proto, NewObjectKind newKind, H
if (!proto) {
// Object.create(null) is common, optimize it by using an allocation
// site specific TypeObject. Because GetTypeCallerInitObject is pretty
// slow, the caller can pass in the type if it's known and we use that
// site specific ObjectGroup. Because GetCallerInitGroup is pretty
// slow, the caller can pass in the group if it's known and we use that
// instead.
RootedTypeObject ntype(cx, type);
if (!ntype) {
ntype = GetTypeCallerInitObject(cx, JSProto_Null);
if (!ntype)
RootedObjectGroup ngroup(cx, group);
if (!ngroup) {
ngroup = GetCallerInitGroup(cx, JSProto_Null);
if (!ngroup)
return nullptr;
}
MOZ_ASSERT(!ntype->proto().toObjectOrNull());
MOZ_ASSERT(!ngroup->proto().toObjectOrNull());
return NewObjectWithType<PlainObject>(cx, ntype, cx->global(), allocKind,
newKind);
return NewObjectWithGroup<PlainObject>(cx, ngroup, cx->global(), allocKind,
newKind);
}
return NewObjectWithGivenProto<PlainObject>(cx, proto, cx->global(), allocKind, newKind);
@ -654,8 +655,8 @@ PlainObject *
js::ObjectCreateWithTemplate(JSContext *cx, HandlePlainObject templateObj)
{
RootedObject proto(cx, templateObj->getProto());
RootedTypeObject type(cx, templateObj->type());
return ObjectCreateImpl(cx, proto, GenericObject, type);
RootedObjectGroup group(cx, templateObj->group());
return ObjectCreateImpl(cx, proto, GenericObject, group);
}
/* ES5 15.2.3.5: Object.create(O [, Properties]) */
@ -1149,7 +1150,7 @@ CreateObjectPrototype(JSContext *cx, JSProtoKey key)
* to have unknown properties, to simplify handling of e.g. heterogenous
* objects in JSON and script literals.
*/
if (!JSObject::setNewTypeUnknown(cx, &PlainObject::class_, objectProto))
if (!JSObject::setNewGroupUnknown(cx, &PlainObject::class_, objectProto))
return nullptr;
return objectProto;

Просмотреть файл

@ -25,7 +25,7 @@ obj_valueOf(JSContext *cx, unsigned argc, JS::Value *vp);
PlainObject *
ObjectCreateImpl(JSContext *cx, HandleObject proto, NewObjectKind newKind = GenericObject,
HandleTypeObject type = js::NullPtr());
HandleObjectGroup group = js::NullPtr());
PlainObject *
ObjectCreateWithTemplate(JSContext *cx, HandlePlainObject templateObj);

Просмотреть файл

@ -1495,13 +1495,13 @@ OutlineTypedObject::createUnattachedWithClass(JSContext *cx,
MOZ_ASSERT(clasp == &OutlineTransparentTypedObject::class_ ||
clasp == &OutlineOpaqueTypedObject::class_);
RootedTypeObject type(cx, cx->getNewType(clasp, TaggedProto(&descr->typedProto()), descr));
if (!type)
RootedObjectGroup group(cx, cx->getNewGroup(clasp, TaggedProto(&descr->typedProto()), descr));
if (!group)
return nullptr;
NewObjectKind newKind = (heap == gc::TenuredHeap) ? MaybeSingletonObject : GenericObject;
OutlineTypedObject *obj = NewObjectWithType<OutlineTypedObject>(cx, type, cx->global(),
gc::FINALIZE_OBJECT0, newKind);
OutlineTypedObject *obj = NewObjectWithGroup<OutlineTypedObject>(cx, group, cx->global(),
gc::FINALIZE_OBJECT0, newKind);
if (!obj)
return nullptr;
@ -2179,12 +2179,12 @@ InlineTypedObject::create(JSContext *cx, HandleTypeDescr descr, gc::InitialHeap
? &InlineOpaqueTypedObject::class_
: &InlineTransparentTypedObject::class_;
RootedTypeObject type(cx, cx->getNewType(clasp, TaggedProto(&descr->typedProto()), descr));
if (!type)
RootedObjectGroup group(cx, cx->getNewGroup(clasp, TaggedProto(&descr->typedProto()), descr));
if (!group)
return nullptr;
NewObjectKind newKind = (heap == gc::TenuredHeap) ? MaybeSingletonObject : GenericObject;
return NewObjectWithType<InlineTypedObject>(cx, type, cx->global(), allocKind, newKind);
return NewObjectWithGroup<InlineTypedObject>(cx, group, cx->global(), allocKind, newKind);
}
/* static */ InlineTypedObject *

Просмотреть файл

@ -584,7 +584,7 @@ class TypedObject : public JSObject
}
TypeDescr &typeDescr() const {
return type()->typeDescr();
return group()->typeDescr();
}
TypeDescr &maybeForwardedTypeDescr() const {

Просмотреть файл

@ -4295,7 +4295,7 @@ ParseNode::getConstantValue(ExclusiveContext *cx, AllowConstantObjects allowObje
}
MOZ_ASSERT(idx == count);
types::FixArrayType(cx, obj);
types::FixArrayGroup(cx, obj);
vp.setObject(*obj);
return true;
}
@ -4358,7 +4358,7 @@ ParseNode::getConstantValue(ExclusiveContext *cx, AllowConstantObjects allowObje
}
}
types::FixObjectType(cx, obj);
types::FixObjectGroup(cx, obj);
vp.setObject(*obj);
return true;
}
@ -4376,7 +4376,7 @@ EmitSingletonInitialiser(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *
return false;
RootedNativeObject obj(cx, &value.toObject().as<NativeObject>());
if (!obj->is<ArrayObject>() && !JSObject::setSingletonType(cx, obj))
if (!obj->is<ArrayObject>() && !JSObject::setSingleton(cx, obj))
return false;
ObjectBox *objbox = bce->parser->newObjectBox(obj);

Просмотреть файл

@ -183,7 +183,7 @@ class Shape;
class UnownedBaseShape;
namespace types {
struct TypeObject;
struct ObjectGroup;
}
namespace jit {
@ -232,7 +232,7 @@ template <> struct MapTypeToTraceKind<SharedArrayBufferObject>{ static const JSG
template <> struct MapTypeToTraceKind<SharedTypedArrayObject>{ static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<UnownedBaseShape> { static const JSGCTraceKind kind = JSTRACE_BASE_SHAPE; };
template <> struct MapTypeToTraceKind<jit::JitCode> { static const JSGCTraceKind kind = JSTRACE_JITCODE; };
template <> struct MapTypeToTraceKind<types::TypeObject>{ static const JSGCTraceKind kind = JSTRACE_TYPE_OBJECT; };
template <> struct MapTypeToTraceKind<types::ObjectGroup> { static const JSGCTraceKind kind = JSTRACE_OBJECT_GROUP; };
// Direct value access used by the write barriers and the jits.
void
@ -806,7 +806,7 @@ typedef HeapPtr<PropertyName*> HeapPtrPropertyName;
typedef HeapPtr<Shape*> HeapPtrShape;
typedef HeapPtr<UnownedBaseShape*> HeapPtrUnownedBaseShape;
typedef HeapPtr<jit::JitCode*> HeapPtrJitCode;
typedef HeapPtr<types::TypeObject*> HeapPtrTypeObject;
typedef HeapPtr<types::ObjectGroup*> HeapPtrObjectGroup;
typedef PreBarriered<Value> PreBarrieredValue;
typedef RelocatablePtr<Value> RelocatableValue;
@ -827,7 +827,7 @@ typedef ReadBarriered<ScriptSourceObject*> ReadBarrieredScriptSourceObject;
typedef ReadBarriered<Shape*> ReadBarrieredShape;
typedef ReadBarriered<UnownedBaseShape*> ReadBarrieredUnownedBaseShape;
typedef ReadBarriered<jit::JitCode*> ReadBarrieredJitCode;
typedef ReadBarriered<types::TypeObject*> ReadBarrieredTypeObject;
typedef ReadBarriered<types::ObjectGroup*> ReadBarrieredObjectGroup;
typedef ReadBarriered<JSAtom*> ReadBarrieredAtom;
typedef ReadBarriered<JS::Symbol*> ReadBarrieredSymbol;

Просмотреть файл

@ -826,7 +826,7 @@ class GCRuntime
Finished
};
void minorGCImpl(JS::gcreason::Reason reason, Nursery::TypeObjectList *pretenureTypes);
void minorGCImpl(JS::gcreason::Reason reason, Nursery::ObjectGroupList *pretenureGroups);
// For ArenaLists::allocateFromArena()
friend class ArenaLists;
@ -1097,10 +1097,15 @@ class GCRuntime
unsigned generationalDisabled;
/*
* Some code cannot tolerate compacting GC so it can be disabled with this
* counter.
* Whether compacting GC can is enabled globally.
*/
unsigned compactingDisabled;
bool compactingEnabled;
/*
* Some code cannot tolerate compacting GC so it can be disabled temporarily
* with AutoDisableCompactingGC which uses this counter.
*/
unsigned compactingDisabledCount;
/*
* This is true if we are in the middle of a brain transplant (e.g.,

Просмотреть файл

@ -25,7 +25,7 @@ JS_STATIC_ASSERT(LastObjectAllocKind == FINALIZE_OBJECT_LAST);
static FILE *gcTraceFile = nullptr;
static HashSet<const Class *, DefaultHasher<const Class *>, SystemAllocPolicy> tracedClasses;
static HashSet<const TypeObject *, DefaultHasher<const TypeObject *>, SystemAllocPolicy> tracedTypes;
static HashSet<const ObjectGroup *, DefaultHasher<const ObjectGroup *>, SystemAllocPolicy> tracedGroups;
static inline void
WriteWord(uint64_t data)
@ -156,26 +156,26 @@ MaybeTraceClass(const Class *clasp)
}
static void
MaybeTraceType(TypeObject *type)
MaybeTraceGroup(ObjectGroup *group)
{
if (tracedTypes.has(type))
if (tracedGroups.has(group))
return;
MaybeTraceClass(type->clasp());
TraceEvent(TraceEventTypeInfo, uint64_t(type));
TraceAddress(type->clasp());
TraceInt(type->flags());
MaybeTraceClass(group->clasp());
TraceEvent(TraceEventGroupInfo, uint64_t(group));
TraceAddress(group->clasp());
TraceInt(group->flags());
MOZ_ALWAYS_TRUE(tracedTypes.put(type));
MOZ_ALWAYS_TRUE(tracedGroups.put(group));
}
void
js::gc::TraceTypeNewScript(TypeObject *type)
js::gc::TraceTypeNewScript(ObjectGroup *group)
{
const size_t bufLength = 128;
static char buffer[bufLength];
MOZ_ASSERT(type->hasNewScript());
JSAtom *funName = type->newScript()->fun->displayAtom();
MOZ_ASSERT(group->hasNewScript());
JSAtom *funName = group->newScript()->fun->displayAtom();
if (!funName)
return;
@ -184,7 +184,7 @@ js::gc::TraceTypeNewScript(TypeObject *type)
CopyChars(reinterpret_cast<Latin1Char *>(buffer), *funName);
buffer[length] = 0;
TraceEvent(TraceEventTypeNewScript, uint64_t(type));
TraceEvent(TraceEventTypeNewScript, uint64_t(group));
TraceString(buffer);
}
@ -194,10 +194,10 @@ js::gc::TraceCreateObject(JSObject* object)
if (!gcTraceFile)
return;
TypeObject *type = object->type();
MaybeTraceType(type);
ObjectGroup *group = object->group();
MaybeTraceGroup(group);
TraceEvent(TraceEventCreateObject, uint64_t(object));
TraceAddress(type);
TraceAddress(group);
}
void
@ -230,8 +230,8 @@ js::gc::TraceTenuredFinalize(Cell *thing)
{
if (!gcTraceFile)
return;
if (thing->tenuredGetAllocKind() == FINALIZE_TYPE_OBJECT)
tracedTypes.remove(static_cast<const TypeObject *>(thing));
if (thing->tenuredGetAllocKind() == FINALIZE_OBJECT_GROUP)
tracedGroups.remove(static_cast<const ObjectGroup *>(thing));
TraceEvent(TraceEventTenuredFinalize, uint64_t(thing));
}

Просмотреть файл

@ -11,7 +11,7 @@
namespace js {
namespace types { struct TypeObject; }
namespace types { struct ObjectGroup; }
namespace gc {
@ -29,7 +29,7 @@ extern void TraceMinorGCEnd();
extern void TraceMajorGCStart();
extern void TraceTenuredFinalize(Cell *thing);
extern void TraceMajorGCEnd();
extern void TraceTypeNewScript(js::types::TypeObject *type);
extern void TraceTypeNewScript(js::types::ObjectGroup *group);
#else
@ -45,7 +45,7 @@ inline void TraceMinorGCEnd() {}
inline void TraceMajorGCStart() {}
inline void TraceTenuredFinalize(Cell *thing) {}
inline void TraceMajorGCEnd() {}
inline void TraceTypeNewScript(js::types::TypeObject *type) {}
inline void TraceTypeNewScript(js::types::ObjectGroup *group) {}
#endif

Просмотреть файл

@ -94,7 +94,7 @@ enum AllocKind {
FINALIZE_SHAPE,
FINALIZE_ACCESSOR_SHAPE,
FINALIZE_BASE_SHAPE,
FINALIZE_TYPE_OBJECT,
FINALIZE_OBJECT_GROUP,
FINALIZE_FAT_INLINE_STRING,
FINALIZE_STRING,
FINALIZE_EXTERNAL_STRING,
@ -110,29 +110,29 @@ static inline JSGCTraceKind
MapAllocToTraceKind(AllocKind kind)
{
static const JSGCTraceKind map[] = {
JSTRACE_OBJECT, /* FINALIZE_OBJECT0 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT0_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT2 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT2_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT4 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT4_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT8 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT8_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT12 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT12_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT16 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT16_BACKGROUND */
JSTRACE_SCRIPT, /* FINALIZE_SCRIPT */
JSTRACE_LAZY_SCRIPT,/* FINALIZE_LAZY_SCRIPT */
JSTRACE_SHAPE, /* FINALIZE_SHAPE */
JSTRACE_SHAPE, /* FINALIZE_ACCESSOR_SHAPE */
JSTRACE_BASE_SHAPE, /* FINALIZE_BASE_SHAPE */
JSTRACE_TYPE_OBJECT,/* FINALIZE_TYPE_OBJECT */
JSTRACE_STRING, /* FINALIZE_FAT_INLINE_STRING */
JSTRACE_STRING, /* FINALIZE_STRING */
JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING */
JSTRACE_SYMBOL, /* FINALIZE_SYMBOL */
JSTRACE_JITCODE, /* FINALIZE_JITCODE */
JSTRACE_OBJECT, /* FINALIZE_OBJECT0 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT0_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT2 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT2_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT4 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT4_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT8 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT8_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT12 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT12_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT16 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT16_BACKGROUND */
JSTRACE_SCRIPT, /* FINALIZE_SCRIPT */
JSTRACE_LAZY_SCRIPT, /* FINALIZE_LAZY_SCRIPT */
JSTRACE_SHAPE, /* FINALIZE_SHAPE */
JSTRACE_SHAPE, /* FINALIZE_ACCESSOR_SHAPE */
JSTRACE_BASE_SHAPE, /* FINALIZE_BASE_SHAPE */
JSTRACE_OBJECT_GROUP, /* FINALIZE_OBJECT_GROUP */
JSTRACE_STRING, /* FINALIZE_FAT_INLINE_STRING */
JSTRACE_STRING, /* FINALIZE_STRING */
JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING */
JSTRACE_SYMBOL, /* FINALIZE_SYMBOL */
JSTRACE_JITCODE, /* FINALIZE_JITCODE */
};
static_assert(MOZ_ARRAY_LENGTH(map) == FINALIZE_LIMIT,

Просмотреть файл

@ -87,7 +87,7 @@ static inline void
PushMarkStack(GCMarker *gcmarker, JS::Symbol *sym);
static inline void
PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing);
PushMarkStack(GCMarker *gcmarker, types::ObjectGroup *thing);
namespace js {
namespace gc {
@ -98,7 +98,7 @@ static void MarkChildren(JSTracer *trc, JSScript *script);
static void MarkChildren(JSTracer *trc, LazyScript *lazy);
static void MarkChildren(JSTracer *trc, Shape *shape);
static void MarkChildren(JSTracer *trc, BaseShape *base);
static void MarkChildren(JSTracer *trc, types::TypeObject *type);
static void MarkChildren(JSTracer *trc, types::ObjectGroup *group);
static void MarkChildren(JSTracer *trc, jit::JitCode *code);
} /* namespace gc */
@ -614,7 +614,7 @@ DeclMarkerImpl(String, JSFlatString)
DeclMarkerImpl(String, JSLinearString)
DeclMarkerImpl(String, PropertyName)
DeclMarkerImpl(Symbol, JS::Symbol)
DeclMarkerImpl(TypeObject, js::types::TypeObject)
DeclMarkerImpl(ObjectGroup, js::types::ObjectGroup)
} /* namespace gc */
} /* namespace js */
@ -654,8 +654,8 @@ gc::MarkKind(JSTracer *trc, void **thingp, JSGCTraceKind kind)
case JSTRACE_SHAPE:
MarkInternal(trc, reinterpret_cast<Shape **>(thingp));
break;
case JSTRACE_TYPE_OBJECT:
MarkInternal(trc, reinterpret_cast<types::TypeObject **>(thingp));
case JSTRACE_OBJECT_GROUP:
MarkInternal(trc, reinterpret_cast<types::ObjectGroup **>(thingp));
break;
default:
MOZ_CRASH("Invalid trace kind in MarkKind.");
@ -791,14 +791,14 @@ gc::MarkTypeRoot(JSTracer *trc, types::Type *v, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
trc->setTracingName(name);
if (v->isSingleObject()) {
JSObject *obj = v->singleObject();
if (v->isSingleton()) {
JSObject *obj = v->singleton();
MarkInternal(trc, &obj);
*v = types::Type::ObjectType(obj);
} else if (v->isTypeObject()) {
types::TypeObject *typeObj = v->typeObject();
MarkInternal(trc, &typeObj);
*v = types::Type::ObjectType(typeObj);
} else if (v->isGroup()) {
types::ObjectGroup *group = v->group();
MarkInternal(trc, &group);
*v = types::Type::ObjectType(group);
}
}
@ -1066,7 +1066,7 @@ PushMarkStack(GCMarker *gcmarker, JSFunction *thing)
}
static void
PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing)
PushMarkStack(GCMarker *gcmarker, types::ObjectGroup *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
MOZ_ASSERT(!IsInsideNursery(thing));
@ -1422,63 +1422,63 @@ gc::MarkCycleCollectorChildren(JSTracer *trc, Shape *shape)
}
static void
ScanTypeObject(GCMarker *gcmarker, types::TypeObject *type)
ScanObjectGroup(GCMarker *gcmarker, types::ObjectGroup *group)
{
unsigned count = type->getPropertyCount();
unsigned count = group->getPropertyCount();
for (unsigned i = 0; i < count; i++) {
if (types::Property *prop = type->getProperty(i))
MarkId(gcmarker, &prop->id, "TypeObject property id");
if (types::Property *prop = group->getProperty(i))
MarkId(gcmarker, &prop->id, "ObjectGroup property id");
}
if (type->proto().isObject())
PushMarkStack(gcmarker, type->proto().toObject());
if (group->proto().isObject())
PushMarkStack(gcmarker, group->proto().toObject());
if (type->singleton() && !type->lazy())
PushMarkStack(gcmarker, type->singleton());
if (group->singleton() && !group->lazy())
PushMarkStack(gcmarker, group->singleton());
if (type->newScript())
type->newScript()->trace(gcmarker);
if (group->newScript())
group->newScript()->trace(gcmarker);
if (type->maybeUnboxedLayout())
type->unboxedLayout().trace(gcmarker);
if (group->maybeUnboxedLayout())
group->unboxedLayout().trace(gcmarker);
if (TypeDescr *descr = type->maybeTypeDescr())
if (TypeDescr *descr = group->maybeTypeDescr())
PushMarkStack(gcmarker, descr);
if (JSFunction *fun = type->maybeInterpretedFunction())
if (JSFunction *fun = group->maybeInterpretedFunction())
PushMarkStack(gcmarker, fun);
}
static void
gc::MarkChildren(JSTracer *trc, types::TypeObject *type)
gc::MarkChildren(JSTracer *trc, types::ObjectGroup *group)
{
unsigned count = type->getPropertyCount();
unsigned count = group->getPropertyCount();
for (unsigned i = 0; i < count; i++) {
types::Property *prop = type->getProperty(i);
types::Property *prop = group->getProperty(i);
if (prop)
MarkId(trc, &prop->id, "type_prop");
MarkId(trc, &prop->id, "group_property");
}
if (type->proto().isObject())
MarkObject(trc, &type->protoRaw(), "type_proto");
if (group->proto().isObject())
MarkObject(trc, &group->protoRaw(), "group_proto");
if (type->singleton() && !type->lazy())
MarkObject(trc, &type->singletonRaw(), "type_singleton");
if (group->singleton() && !group->lazy())
MarkObject(trc, &group->singletonRaw(), "group_singleton");
if (type->newScript())
type->newScript()->trace(trc);
if (group->newScript())
group->newScript()->trace(trc);
if (type->maybeUnboxedLayout())
type->unboxedLayout().trace(trc);
if (group->maybeUnboxedLayout())
group->unboxedLayout().trace(trc);
if (JSObject *descr = type->maybeTypeDescr()) {
MarkObjectUnbarriered(trc, &descr, "type_descr");
type->setTypeDescr(&descr->as<TypeDescr>());
if (JSObject *descr = group->maybeTypeDescr()) {
MarkObjectUnbarriered(trc, &descr, "group_type_descr");
group->setTypeDescr(&descr->as<TypeDescr>());
}
if (JSObject *fun = type->maybeInterpretedFunction()) {
MarkObjectUnbarriered(trc, &fun, "type_function");
type->setInterpretedFunction(&fun->as<JSFunction>());
if (JSObject *fun = group->maybeInterpretedFunction()) {
MarkObjectUnbarriered(trc, &fun, "group_function");
group->setInterpretedFunction(&fun->as<JSFunction>());
}
}
@ -1532,8 +1532,8 @@ gc::PushArena(GCMarker *gcmarker, ArenaHeader *aheader)
PushArenaTyped<js::Shape>(gcmarker, aheader);
break;
case JSTRACE_TYPE_OBJECT:
PushArenaTyped<js::types::TypeObject>(gcmarker, aheader);
case JSTRACE_OBJECT_GROUP:
PushArenaTyped<js::types::ObjectGroup>(gcmarker, aheader);
break;
default:
@ -1651,8 +1651,8 @@ GCMarker::restoreValueArray(NativeObject *obj, void **vpp, void **endp)
void
GCMarker::processMarkStackOther(uintptr_t tag, uintptr_t addr)
{
if (tag == TypeTag) {
ScanTypeObject(this, reinterpret_cast<types::TypeObject *>(addr));
if (tag == GroupTag) {
ScanObjectGroup(this, reinterpret_cast<types::ObjectGroup *>(addr));
} else if (tag == SavedValueArrayTag) {
MOZ_ASSERT(!(addr & CellMask));
NativeObject *obj = reinterpret_cast<NativeObject *>(addr);
@ -1796,14 +1796,14 @@ GCMarker::processMarkStackTop(SliceBudget &budget)
return;
}
types::TypeObject *type = obj->typeFromGC();
PushMarkStack(this, type);
types::ObjectGroup *group = obj->groupFromGC();
PushMarkStack(this, group);
Shape *shape = obj->lastProperty();
PushMarkStack(this, shape);
/* Call the trace hook if necessary. */
const Class *clasp = type->clasp();
const Class *clasp = group->clasp();
if (clasp->trace) {
// Global objects all have the same trace hook. That hook is safe without barriers
// if the global has no custom trace hook of its own, or has been moved to a different
@ -1950,8 +1950,8 @@ js::TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind)
MarkChildren(trc, static_cast<Shape *>(thing));
break;
case JSTRACE_TYPE_OBJECT:
MarkChildren(trc, (types::TypeObject *)thing);
case JSTRACE_OBJECT_GROUP:
MarkChildren(trc, (types::ObjectGroup *)thing);
break;
default:

Просмотреть файл

@ -132,7 +132,7 @@ DeclMarker(String, JSFlatString)
DeclMarker(String, JSLinearString)
DeclMarker(String, PropertyName)
DeclMarker(Symbol, JS::Symbol)
DeclMarker(TypeObject, types::TypeObject)
DeclMarker(ObjectGroup, types::ObjectGroup)
#undef DeclMarker

Просмотреть файл

@ -164,10 +164,10 @@ js::Nursery::verifyFinalizerList()
if (overlay->isForwarded())
obj = static_cast<JSObject *>(overlay->forwardingAddress());
MOZ_ASSERT(obj);
MOZ_ASSERT(obj->type());
MOZ_ASSERT(obj->type()->clasp());
MOZ_ASSERT(obj->type()->clasp()->finalize);
MOZ_ASSERT(obj->type()->clasp()->flags & JSCLASS_FINALIZE_FROM_NURSERY);
MOZ_ASSERT(obj->group());
MOZ_ASSERT(obj->group()->clasp());
MOZ_ASSERT(obj->group()->clasp()->finalize);
MOZ_ASSERT(obj->group()->clasp()->flags & JSCLASS_FINALIZE_FROM_NURSERY);
}
#endif // DEBUG
}
@ -533,15 +533,15 @@ js::Nursery::forwardBufferPointer(HeapSlot **pSlotsElems)
MOZ_ASSERT(IsWriteableAddress(*pSlotsElems));
}
// Structure for counting how many times objects of a particular type have been
// tenured during a minor collection.
// Structure for counting how many times objects in a particular group have
// been tenured during a minor collection.
struct TenureCount
{
types::TypeObject *type;
types::ObjectGroup *group;
int count;
};
// Keep rough track of how many times we tenure objects of particular types
// Keep rough track of how many times we tenure objects in particular groups
// during minor collections, using a fixed size hash for efficiency at the cost
// of potential collisions.
struct Nursery::TenureCountCache
@ -550,8 +550,8 @@ struct Nursery::TenureCountCache
TenureCountCache() { PodZero(this); }
TenureCount &findEntry(types::TypeObject *type) {
return entries[PointerHasher<types::TypeObject *, 3>::hash(type) % ArrayLength(entries)];
TenureCount &findEntry(types::ObjectGroup *group) {
return entries[PointerHasher<types::ObjectGroup *, 3>::hash(group) % ArrayLength(entries)];
}
};
@ -562,11 +562,11 @@ js::Nursery::collectToFixedPoint(MinorCollectionTracer *trc, TenureCountCache &t
JSObject *obj = static_cast<JSObject*>(p->forwardingAddress());
traceObject(trc, obj);
TenureCount &entry = tenureCounts.findEntry(obj->type());
if (entry.type == obj->type()) {
TenureCount &entry = tenureCounts.findEntry(obj->group());
if (entry.group == obj->group()) {
entry.count++;
} else if (!entry.type) {
entry.type = obj->type();
} else if (!entry.group) {
entry.group = obj->group();
entry.count = 1;
}
}
@ -765,7 +765,7 @@ js::Nursery::MinorGCCallback(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
#define TIME_TOTAL(name) (timstampEnd_##name - timstampStart_##name)
void
js::Nursery::collect(JSRuntime *rt, JS::gcreason::Reason reason, TypeObjectList *pretenureTypes)
js::Nursery::collect(JSRuntime *rt, JS::gcreason::Reason reason, ObjectGroupList *pretenureGroups)
{
if (rt->mainThread.suppressGC)
return;
@ -897,14 +897,14 @@ js::Nursery::collect(JSRuntime *rt, JS::gcreason::Reason reason, TypeObjectList
// If we are promoting the nursery, or exhausted the store buffer with
// pointers to nursery things, which will force a collection well before
// the nursery is full, look for object types that are getting promoted
// the nursery is full, look for object groups that are getting promoted
// excessively and try to pretenure them.
TIME_START(pretenure);
if (pretenureTypes && (promotionRate > 0.8 || reason == JS::gcreason::FULL_STORE_BUFFER)) {
if (pretenureGroups && (promotionRate > 0.8 || reason == JS::gcreason::FULL_STORE_BUFFER)) {
for (size_t i = 0; i < ArrayLength(tenureCounts.entries); i++) {
const TenureCount &entry = tenureCounts.entries[i];
if (entry.count >= 3000)
pretenureTypes->append(entry.type); // ignore alloc failure
pretenureGroups->append(entry.group); // ignore alloc failure
}
}
TIME_END(pretenure);

Просмотреть файл

@ -40,7 +40,7 @@ class MinorCollectionTracer;
} /* namespace gc */
namespace types {
struct TypeObject;
struct ObjectGroup;
}
namespace jit {
@ -117,13 +117,13 @@ class Nursery
/* Free a slots array. */
void freeSlots(HeapSlot *slots);
typedef Vector<types::TypeObject *, 0, SystemAllocPolicy> TypeObjectList;
typedef Vector<types::ObjectGroup *, 0, SystemAllocPolicy> ObjectGroupList;
/*
* Do a minor collection, optionally specifying a list to store types which
* Do a minor collection, optionally specifying a list to store groups which
* should be pretenured afterwards.
*/
void collect(JSRuntime *rt, JS::gcreason::Reason reason, TypeObjectList *pretenureTypes);
void collect(JSRuntime *rt, JS::gcreason::Reason reason, ObjectGroupList *pretenureGroups);
/*
* Check if the thing at |*ref| in the Nursery has been forwarded. If so,

Просмотреть файл

@ -103,8 +103,8 @@ MarkExactStackRootsAcrossTypes(T context, JSTracer *trc)
MarkExactStackRootList<JSObject *, MarkObjectRoot>(trc, context, "exact-object");
MarkExactStackRootList<Shape *, MarkShapeRoot>(trc, context, "exact-shape");
MarkExactStackRootList<BaseShape *, MarkBaseShapeRoot>(trc, context, "exact-baseshape");
MarkExactStackRootList<types::TypeObject *, MarkTypeObjectRoot>(
trc, context, "exact-typeobject");
MarkExactStackRootList<types::ObjectGroup *, MarkObjectGroupRoot>(
trc, context, "exact-objectgroup");
MarkExactStackRootList<JSString *, MarkStringRoot>(trc, context, "exact-string");
MarkExactStackRootList<JS::Symbol *, MarkSymbolRoot>(trc, context, "exact-symbol");
MarkExactStackRootList<jit::JitCode *, MarkJitCodeRoot>(trc, context, "exact-jitcode");

Просмотреть файл

@ -21,13 +21,13 @@ class PlainObject;
class ScriptSourceObject;
class Shape;
namespace types { struct TypeObject; }
namespace types { struct ObjectGroup; }
// These are internal counterparts to the public types such as HandleObject.
typedef JS::Handle<NativeObject*> HandleNativeObject;
typedef JS::Handle<Shape*> HandleShape;
typedef JS::Handle<types::TypeObject*> HandleTypeObject;
typedef JS::Handle<types::ObjectGroup*> HandleObjectGroup;
typedef JS::Handle<JSAtom*> HandleAtom;
typedef JS::Handle<JSLinearString*> HandleLinearString;
typedef JS::Handle<PropertyName*> HandlePropertyName;
@ -41,7 +41,7 @@ typedef JS::MutableHandle<NativeObject*> MutableHandleNativeObject;
typedef JS::Rooted<NativeObject*> RootedNativeObject;
typedef JS::Rooted<Shape*> RootedShape;
typedef JS::Rooted<types::TypeObject*> RootedTypeObject;
typedef JS::Rooted<types::ObjectGroup*> RootedObjectGroup;
typedef JS::Rooted<JSAtom*> RootedAtom;
typedef JS::Rooted<JSLinearString*> RootedLinearString;
typedef JS::Rooted<PropertyName*> RootedPropertyName;

Просмотреть файл

@ -580,6 +580,8 @@ Join(const FragmentVector &fragments) {
UniqueChars
Statistics::formatDescription()
{
const double bytesPerMiB = 1024 * 1024;
int64_t sccTotal, sccLongest;
sccDurations(&sccTotal, &sccLongest);
@ -599,6 +601,7 @@ Statistics::formatDescription()
SCC Sweep Total (MaxPause): %.3fms (%.3fms)\n\
HeapSize: %.3f MiB\n\
Chunk Delta (magnitude): %+d (%d)\n\
Arenas Relocated: %.3f MiB\n\
";
char buffer[1024];
memset(buffer, 0, sizeof(buffer));
@ -613,9 +616,10 @@ Statistics::formatDescription()
counts[STAT_STOREBUFFER_OVERFLOW],
mmu20 * 100., mmu50 * 100.,
t(sccTotal), t(sccLongest),
double(preBytes) / 1024. / 1024.,
double(preBytes) / bytesPerMiB,
counts[STAT_NEW_CHUNK] - counts[STAT_DESTROY_CHUNK], counts[STAT_NEW_CHUNK] +
counts[STAT_DESTROY_CHUNK]);
counts[STAT_DESTROY_CHUNK],
double(ArenaSize * counts[STAT_ARENA_RELOCATED]) / bytesPerMiB);
return make_string_copy(buffer);
}

Просмотреть файл

@ -95,6 +95,9 @@ enum Stat {
// compaction
STAT_STOREBUFFER_OVERFLOW,
// Number of arenas relocated by compacting GC.
STAT_ARENA_RELOCATED,
STAT_LIMIT
};

Просмотреть файл

@ -233,8 +233,8 @@ JS_GetTraceThingInfo(char *buf, size_t bufsize, JSTracer *trc, void *thing,
name = "shape";
break;
case JSTRACE_TYPE_OBJECT:
name = "type_object";
case JSTRACE_OBJECT_GROUP:
name = "object_group";
break;
default:

Просмотреть файл

@ -23,7 +23,7 @@ namespace jit {
class JitCode;
}
namespace types {
struct TypeObject;
struct ObjectGroup;
}
static const size_t NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY = 4096;
@ -143,8 +143,8 @@ class GCMarker : public JSTracer
pushTaggedPtr(ObjectTag, obj);
}
void pushType(types::TypeObject *type) {
pushTaggedPtr(TypeTag, type);
void pushType(types::ObjectGroup *group) {
pushTaggedPtr(GroupTag, group);
}
void pushJitCode(jit::JitCode *code) {
@ -231,7 +231,7 @@ class GCMarker : public JSTracer
enum StackTag {
ValueArrayTag,
ObjectTag,
TypeTag,
GroupTag,
XmlTag,
SavedValueArrayTag,
JitCodeTag,

Просмотреть файл

@ -56,10 +56,10 @@ var stacks;
var ffi = function(enable) {
if (enable == +1)
enableSPSProfiling();
if (enable == -1)
disableSPSProfiling();
enableSingleStepProfiling();
stacks = disableSingleStepProfiling();
if (enable == -1)
disableSPSProfiling();
}
var f = asmLink(asmCompile('global','ffis',USE_ASM + "var ffi=ffis.ffi; function g(i) { i=i|0; ffi(i|0) } function f(i) { i=i|0; g(i|0) } return f"), null, {ffi});
f(0);

Просмотреть файл

@ -21,8 +21,8 @@ print(uneval(findPath(a, o)));
function C() {}
C.prototype.obj = {};
var c = new C;
Match.Pattern([{node: {}, edge: "type"},
{node: Match.Pattern.ANY, edge: "type_proto"},
Match.Pattern([{node: {}, edge: "group"},
{node: Match.Pattern.ANY, edge: "group_proto"},
{node: { constructor: Match.Pattern.ANY }, edge: "obj"}])
.assert(findPath(c, c.obj));
print(uneval(findPath(c, c.obj)));

Просмотреть файл

@ -0,0 +1,11 @@
// |jit-test| error:TypeError
(function() {
let r
g = function(x) {
((-0x80000000 + (x >>> 0)) != 0) ? 0 : x()
}
})()
g(NaN)
g(0x80000000);

Просмотреть файл

@ -0,0 +1,8 @@
function f(y) {
var x1 = Math.max(-2147483649 >> 0, y >>> 0);
var x2 = x1 | 0;
return (x2 >= 0) ? 1 : 0;
}
assertEq(f(0), 1);
assertEq(f(-1), 0);

Просмотреть файл

@ -1721,21 +1721,21 @@ BaselineCompiler::emit_JSOP_NEWARRAY()
frame.syncStack(0);
uint32_t length = GET_UINT24(pc);
RootedTypeObject type(cx);
if (!types::UseNewTypeForInitializer(script, pc, JSProto_Array)) {
type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
if (!type)
RootedObjectGroup group(cx);
if (!types::UseSingletonForInitializer(script, pc, JSProto_Array)) {
group = types::TypeScript::InitGroup(cx, script, pc, JSProto_Array);
if (!group)
return false;
}
// Pass length in R0, type in R1.
// Pass length in R0, group in R1.
masm.move32(Imm32(length), R0.scratchReg());
masm.movePtr(ImmGCPtr(type), R1.scratchReg());
masm.movePtr(ImmGCPtr(group), R1.scratchReg());
ArrayObject *templateObject = NewDenseUnallocatedArray(cx, length, nullptr, TenuredObject);
if (!templateObject)
return false;
templateObject->setType(type);
templateObject->setGroup(group);
ICNewArray_Fallback::Compiler stubCompiler(cx, templateObject);
if (!emitOpIC(stubCompiler.getStub(&stubSpace_)))
@ -1796,10 +1796,10 @@ BaselineCompiler::emit_JSOP_NEWOBJECT()
{
frame.syncStack(0);
RootedTypeObject type(cx);
if (!types::UseNewTypeForInitializer(script, pc, JSProto_Object)) {
type = types::TypeScript::InitObject(cx, script, pc, JSProto_Object);
if (!type)
RootedObjectGroup group(cx);
if (!types::UseSingletonForInitializer(script, pc, JSProto_Object)) {
group = types::TypeScript::InitGroup(cx, script, pc, JSProto_Object);
if (!group)
return false;
}
@ -1808,21 +1808,21 @@ BaselineCompiler::emit_JSOP_NEWOBJECT()
if (!templateObject)
return false;
if (type) {
templateObject->setType(type);
if (group) {
templateObject->setGroup(group);
} else {
if (!JSObject::setSingletonType(cx, templateObject))
if (!JSObject::setSingleton(cx, templateObject))
return false;
}
// Try to do the allocation inline.
Label done;
if (type && !type->shouldPreTenure() && !templateObject->hasDynamicSlots()) {
if (group && !group->shouldPreTenure() && !templateObject->hasDynamicSlots()) {
Label slowPath;
Register objReg = R0.scratchReg();
Register tempReg = R1.scratchReg();
masm.movePtr(ImmGCPtr(type), tempReg);
masm.branchTest32(Assembler::NonZero, Address(tempReg, types::TypeObject::offsetOfFlags()),
masm.movePtr(ImmGCPtr(group), tempReg);
masm.branchTest32(Assembler::NonZero, Address(tempReg, types::ObjectGroup::offsetOfFlags()),
Imm32(types::OBJECT_FLAG_PRE_TENURE), &slowPath);
masm.branchPtr(Assembler::NotEqual, AbsoluteAddress(cx->compartment()->addressOfMetadataCallback()),
ImmWord(0), &slowPath);
@ -1847,22 +1847,22 @@ BaselineCompiler::emit_JSOP_NEWINIT()
frame.syncStack(0);
JSProtoKey key = JSProtoKey(GET_UINT8(pc));
RootedTypeObject type(cx);
if (!types::UseNewTypeForInitializer(script, pc, key)) {
type = types::TypeScript::InitObject(cx, script, pc, key);
if (!type)
RootedObjectGroup group(cx);
if (!types::UseSingletonForInitializer(script, pc, key)) {
group = types::TypeScript::InitGroup(cx, script, pc, key);
if (!group)
return false;
}
if (key == JSProto_Array) {
// Pass length in R0, type in R1.
// Pass length in R0, group in R1.
masm.move32(Imm32(0), R0.scratchReg());
masm.movePtr(ImmGCPtr(type), R1.scratchReg());
masm.movePtr(ImmGCPtr(group), R1.scratchReg());
ArrayObject *templateObject = NewDenseUnallocatedArray(cx, 0, nullptr, TenuredObject);
if (!templateObject)
return false;
templateObject->setType(type);
templateObject->setGroup(group);
ICNewArray_Fallback::Compiler stubCompiler(cx, templateObject);
if (!emitOpIC(stubCompiler.getStub(&stubSpace_)))
@ -1875,10 +1875,10 @@ BaselineCompiler::emit_JSOP_NEWINIT()
if (!templateObject)
return false;
if (type) {
templateObject->setType(type);
if (group) {
templateObject->setGroup(group);
} else {
if (!JSObject::setSingletonType(cx, templateObject))
if (!JSObject::setSingleton(cx, templateObject))
return false;
}

Просмотреть файл

@ -254,12 +254,12 @@ ICStub::trace(JSTracer *trc)
case ICStub::SetElem_Dense: {
ICSetElem_Dense *setElemStub = toSetElem_Dense();
MarkShape(trc, &setElemStub->shape(), "baseline-getelem-dense-shape");
MarkTypeObject(trc, &setElemStub->type(), "baseline-setelem-dense-type");
MarkObjectGroup(trc, &setElemStub->group(), "baseline-setelem-dense-group");
break;
}
case ICStub::SetElem_DenseAdd: {
ICSetElem_DenseAdd *setElemStub = toSetElem_DenseAdd();
MarkTypeObject(trc, &setElemStub->type(), "baseline-setelem-denseadd-type");
MarkObjectGroup(trc, &setElemStub->group(), "baseline-setelem-denseadd-group");
JS_STATIC_ASSERT(ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH == 4);
@ -280,22 +280,22 @@ ICStub::trace(JSTracer *trc)
}
case ICStub::TypeMonitor_SingleObject: {
ICTypeMonitor_SingleObject *monitorStub = toTypeMonitor_SingleObject();
MarkObject(trc, &monitorStub->object(), "baseline-monitor-singleobject");
MarkObject(trc, &monitorStub->object(), "baseline-monitor-singleton");
break;
}
case ICStub::TypeMonitor_TypeObject: {
ICTypeMonitor_TypeObject *monitorStub = toTypeMonitor_TypeObject();
MarkTypeObject(trc, &monitorStub->type(), "baseline-monitor-typeobject");
case ICStub::TypeMonitor_ObjectGroup: {
ICTypeMonitor_ObjectGroup *monitorStub = toTypeMonitor_ObjectGroup();
MarkObjectGroup(trc, &monitorStub->group(), "baseline-monitor-group");
break;
}
case ICStub::TypeUpdate_SingleObject: {
ICTypeUpdate_SingleObject *updateStub = toTypeUpdate_SingleObject();
MarkObject(trc, &updateStub->object(), "baseline-update-singleobject");
MarkObject(trc, &updateStub->object(), "baseline-update-singleton");
break;
}
case ICStub::TypeUpdate_TypeObject: {
ICTypeUpdate_TypeObject *updateStub = toTypeUpdate_TypeObject();
MarkTypeObject(trc, &updateStub->type(), "baseline-update-typeobject");
case ICStub::TypeUpdate_ObjectGroup: {
ICTypeUpdate_ObjectGroup *updateStub = toTypeUpdate_ObjectGroup();
MarkObjectGroup(trc, &updateStub->group(), "baseline-update-group");
break;
}
case ICStub::GetName_Global: {
@ -365,7 +365,7 @@ ICStub::trace(JSTracer *trc)
}
case ICStub::GetProp_Unboxed: {
ICGetProp_Unboxed *propStub = toGetProp_Unboxed();
MarkTypeObject(trc, &propStub->type(), "baseline-getprop-unboxed-stub-type");
MarkObjectGroup(trc, &propStub->group(), "baseline-getprop-unboxed-stub-group");
break;
}
case ICStub::GetProp_TypedObject: {
@ -422,15 +422,15 @@ ICStub::trace(JSTracer *trc)
case ICStub::SetProp_Native: {
ICSetProp_Native *propStub = toSetProp_Native();
MarkShape(trc, &propStub->shape(), "baseline-setpropnative-stub-shape");
MarkTypeObject(trc, &propStub->type(), "baseline-setpropnative-stub-type");
MarkObjectGroup(trc, &propStub->group(), "baseline-setpropnative-stub-group");
break;
}
case ICStub::SetProp_NativeAdd: {
ICSetProp_NativeAdd *propStub = toSetProp_NativeAdd();
MarkTypeObject(trc, &propStub->type(), "baseline-setpropnativeadd-stub-type");
MarkObjectGroup(trc, &propStub->group(), "baseline-setpropnativeadd-stub-group");
MarkShape(trc, &propStub->newShape(), "baseline-setpropnativeadd-stub-newshape");
if (propStub->newType())
MarkTypeObject(trc, &propStub->newType(), "baseline-setpropnativeadd-stub-new-type");
if (propStub->newGroup())
MarkObjectGroup(trc, &propStub->newGroup(), "baseline-setpropnativeadd-stub-new-group");
JS_STATIC_ASSERT(ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH == 4);
switch (propStub->protoChainDepth()) {
case 0: propStub->toImpl<0>()->traceShapes(trc); break;
@ -444,13 +444,13 @@ ICStub::trace(JSTracer *trc)
}
case ICStub::SetProp_Unboxed: {
ICSetProp_Unboxed *propStub = toSetProp_Unboxed();
MarkTypeObject(trc, &propStub->type(), "baseline-setprop-unboxed-stub-type");
MarkObjectGroup(trc, &propStub->group(), "baseline-setprop-unboxed-stub-group");
break;
}
case ICStub::SetProp_TypedObject: {
ICSetProp_TypedObject *propStub = toSetProp_TypedObject();
MarkShape(trc, &propStub->shape(), "baseline-setprop-typedobject-stub-shape");
MarkTypeObject(trc, &propStub->type(), "baseline-setprop-typedobject-stub-type");
MarkObjectGroup(trc, &propStub->group(), "baseline-setprop-typedobject-stub-group");
break;
}
case ICStub::SetProp_CallScripted: {
@ -1098,7 +1098,7 @@ ICTypeMonitor_Fallback::addMonitorStubForValue(JSContext *cx, JSScript *script,
addOptimizedMonitorStub(stub);
}
} else if (val.toObject().hasSingletonType()) {
} else if (val.toObject().isSingleton()) {
RootedObject obj(cx, &val.toObject());
// Check for existing TypeMonitor stub.
@ -1123,26 +1123,26 @@ ICTypeMonitor_Fallback::addMonitorStubForValue(JSContext *cx, JSScript *script,
addOptimizedMonitorStub(stub);
} else {
RootedTypeObject type(cx, val.toObject().type());
RootedObjectGroup group(cx, val.toObject().group());
// Check for existing TypeMonitor stub.
for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd(); iter++) {
if (iter->isTypeMonitor_TypeObject() &&
iter->toTypeMonitor_TypeObject()->type() == type)
if (iter->isTypeMonitor_ObjectGroup() &&
iter->toTypeMonitor_ObjectGroup()->group() == group)
{
return true;
}
}
ICTypeMonitor_TypeObject::Compiler compiler(cx, type);
ICTypeMonitor_ObjectGroup::Compiler compiler(cx, group);
ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
if (!stub) {
js_ReportOutOfMemory(cx);
return false;
}
JitSpew(JitSpew_BaselineIC, " Added TypeMonitor stub %p for TypeObject %p",
stub, type.get());
JitSpew(JitSpew_BaselineIC, " Added TypeMonitor stub %p for ObjectGroup %p",
stub, group.get());
addOptimizedMonitorStub(stub);
}
@ -1288,17 +1288,17 @@ ICTypeMonitor_SingleObject::Compiler::generateStubCode(MacroAssembler &masm)
}
bool
ICTypeMonitor_TypeObject::Compiler::generateStubCode(MacroAssembler &masm)
ICTypeMonitor_ObjectGroup::Compiler::generateStubCode(MacroAssembler &masm)
{
Label failure;
masm.branchTestObject(Assembler::NotEqual, R0, &failure);
// Guard on the object's TypeObject.
// Guard on the object's ObjectGroup.
Register obj = masm.extractObject(R0, ExtractTemp0);
masm.loadPtr(Address(obj, JSObject::offsetOfType()), R1.scratchReg());
masm.loadPtr(Address(obj, JSObject::offsetOfGroup()), R1.scratchReg());
Address expectedType(BaselineStubReg, ICTypeMonitor_TypeObject::offsetOfType());
masm.branchPtr(Assembler::NotEqual, expectedType, R1.scratchReg(), &failure);
Address expectedGroup(BaselineStubReg, ICTypeMonitor_ObjectGroup::offsetOfGroup());
masm.branchPtr(Assembler::NotEqual, expectedGroup, R1.scratchReg(), &failure);
EmitReturnFromIC(masm);
@ -1350,7 +1350,7 @@ ICUpdatedStub::addUpdateStubForValue(JSContext *cx, HandleScript script, HandleO
JitSpew(JitSpew_BaselineIC, " %s TypeUpdate stub %p for primitive type %d",
existingStub ? "Modified existing" : "Created new", stub, type);
} else if (val.toObject().hasSingletonType()) {
} else if (val.toObject().isSingleton()) {
RootedObject obj(cx, &val.toObject());
// Check for existing TypeUpdate stub.
@ -1372,24 +1372,24 @@ ICUpdatedStub::addUpdateStubForValue(JSContext *cx, HandleScript script, HandleO
addOptimizedUpdateStub(stub);
} else {
RootedTypeObject type(cx, val.toObject().type());
RootedObjectGroup group(cx, val.toObject().group());
// Check for existing TypeUpdate stub.
for (ICStubConstIterator iter(firstUpdateStub_); !iter.atEnd(); iter++) {
if (iter->isTypeUpdate_TypeObject() &&
iter->toTypeUpdate_TypeObject()->type() == type)
if (iter->isTypeUpdate_ObjectGroup() &&
iter->toTypeUpdate_ObjectGroup()->group() == group)
{
return true;
}
}
ICTypeUpdate_TypeObject::Compiler compiler(cx, type);
ICTypeUpdate_ObjectGroup::Compiler compiler(cx, group);
ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
if (!stub)
return false;
JitSpew(JitSpew_BaselineIC, " Added TypeUpdate stub %p for TypeObject %p",
stub, type.get());
JitSpew(JitSpew_BaselineIC, " Added TypeUpdate stub %p for ObjectGroup %p",
stub, group.get());
addOptimizedUpdateStub(stub);
}
@ -1538,19 +1538,19 @@ ICTypeUpdate_SingleObject::Compiler::generateStubCode(MacroAssembler &masm)
}
bool
ICTypeUpdate_TypeObject::Compiler::generateStubCode(MacroAssembler &masm)
ICTypeUpdate_ObjectGroup::Compiler::generateStubCode(MacroAssembler &masm)
{
Label failure;
masm.branchTestObject(Assembler::NotEqual, R0, &failure);
// Guard on the object's TypeObject.
// Guard on the object's ObjectGroup.
Register obj = masm.extractObject(R0, R1.scratchReg());
masm.loadPtr(Address(obj, JSObject::offsetOfType()), R1.scratchReg());
masm.loadPtr(Address(obj, JSObject::offsetOfGroup()), R1.scratchReg());
Address expectedType(BaselineStubReg, ICTypeUpdate_TypeObject::offsetOfType());
masm.branchPtr(Assembler::NotEqual, expectedType, R1.scratchReg(), &failure);
Address expectedGroup(BaselineStubReg, ICTypeUpdate_ObjectGroup::offsetOfGroup());
masm.branchPtr(Assembler::NotEqual, expectedGroup, R1.scratchReg(), &failure);
// Type matches, load true into R1.scratchReg() and return.
// Group matches, load true into R1.scratchReg() and return.
masm.mov(ImmWord(1), R1.scratchReg());
EmitReturnFromIC(masm);
@ -1625,11 +1625,11 @@ ICThis_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
static bool
DoNewArray(JSContext *cx, ICNewArray_Fallback *stub, uint32_t length,
HandleTypeObject type, MutableHandleValue res)
HandleObjectGroup group, MutableHandleValue res)
{
FallbackICSpew(cx, stub, "NewArray");
JSObject *obj = NewDenseArray(cx, length, type, NewArray_FullyAllocating);
JSObject *obj = NewDenseArray(cx, length, group, NewArray_FullyAllocating);
if (!obj)
return false;
@ -1637,7 +1637,7 @@ DoNewArray(JSContext *cx, ICNewArray_Fallback *stub, uint32_t length,
return true;
}
typedef bool(*DoNewArrayFn)(JSContext *, ICNewArray_Fallback *, uint32_t, HandleTypeObject,
typedef bool(*DoNewArrayFn)(JSContext *, ICNewArray_Fallback *, uint32_t, HandleObjectGroup,
MutableHandleValue);
static const VMFunction DoNewArrayInfo = FunctionInfo<DoNewArrayFn>(DoNewArray, TailCall);
@ -2061,8 +2061,8 @@ ICCompare_ObjectWithUndefined::Compiler::generateStubCode(MacroAssembler &masm)
// obj != undefined only where !obj->getClass()->emulatesUndefined()
Label emulatesUndefined;
Register obj = masm.extractObject(objectOperand, ExtractTemp0);
masm.loadPtr(Address(obj, JSObject::offsetOfType()), obj);
masm.loadPtr(Address(obj, types::TypeObject::offsetOfClasp()), obj);
masm.loadPtr(Address(obj, JSObject::offsetOfGroup()), obj);
masm.loadPtr(Address(obj, types::ObjectGroup::offsetOfClasp()), obj);
masm.branchTest32(Assembler::NonZero,
Address(obj, Class::offsetOfFlags()),
Imm32(JSCLASS_EMULATES_UNDEFINED),
@ -4520,8 +4520,8 @@ LoadTypedThingLength(MacroAssembler &masm, TypedThingLayout layout, Register obj
break;
case Layout_OutlineTypedObject:
case Layout_InlineTypedObject:
masm.loadPtr(Address(obj, JSObject::offsetOfType()), result);
masm.loadPtr(Address(result, types::TypeObject::offsetOfAddendum()), result);
masm.loadPtr(Address(obj, JSObject::offsetOfGroup()), result);
masm.loadPtr(Address(result, types::ObjectGroup::offsetOfAddendum()), result);
masm.unboxInt32(Address(result, ArrayTypeDescr::offsetOfLength()), result);
break;
default:
@ -4831,13 +4831,13 @@ DenseSetElemStubExists(JSContext *cx, ICStub::Kind kind, ICSetElem_Fallback *stu
for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
if (kind == ICStub::SetElem_Dense && iter->isSetElem_Dense()) {
ICSetElem_Dense *dense = iter->toSetElem_Dense();
if (obj->lastProperty() == dense->shape() && obj->getType(cx) == dense->type())
if (obj->lastProperty() == dense->shape() && obj->getGroup(cx) == dense->group())
return true;
}
if (kind == ICStub::SetElem_DenseAdd && iter->isSetElem_DenseAdd()) {
ICSetElem_DenseAdd *dense = iter->toSetElem_DenseAdd();
if (obj->getType(cx) == dense->type() && SetElemDenseAddHasSameShapes(dense, obj))
if (obj->getGroup(cx) == dense->group() && SetElemDenseAddHasSameShapes(dense, obj))
return true;
}
}
@ -5023,15 +5023,15 @@ DoSetElemFallback(JSContext *cx, BaselineFrame *frame, ICSetElem_Fallback *stub_
&addingCase, &protoDepth))
{
RootedShape shape(cx, obj->lastProperty());
RootedTypeObject type(cx, obj->getType(cx));
if (!type)
RootedObjectGroup group(cx, obj->getGroup(cx));
if (!group)
return false;
if (addingCase && !DenseSetElemStubExists(cx, ICStub::SetElem_DenseAdd, stub, obj)) {
JitSpew(JitSpew_BaselineIC,
" Generating SetElem_DenseAdd stub "
"(shape=%p, type=%p, protoDepth=%u)",
obj->lastProperty(), type.get(), protoDepth);
"(shape=%p, group=%p, protoDepth=%u)",
obj->lastProperty(), group.get(), protoDepth);
ICSetElemDenseAddCompiler compiler(cx, obj, protoDepth);
ICUpdatedStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
if (!denseStub)
@ -5044,9 +5044,9 @@ DoSetElemFallback(JSContext *cx, BaselineFrame *frame, ICSetElem_Fallback *stub_
!DenseSetElemStubExists(cx, ICStub::SetElem_Dense, stub, obj))
{
JitSpew(JitSpew_BaselineIC,
" Generating SetElem_Dense stub (shape=%p, type=%p)",
obj->lastProperty(), type.get());
ICSetElem_Dense::Compiler compiler(cx, shape, type);
" Generating SetElem_Dense stub (shape=%p, group=%p)",
obj->lastProperty(), group.get());
ICSetElem_Dense::Compiler compiler(cx, shape, group);
ICUpdatedStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
if (!denseStub)
return false;
@ -5193,10 +5193,10 @@ ICSetElem_Dense::Compiler::generateStubCode(MacroAssembler &masm)
regs = availableGeneralRegs(0);
regs.take(R0);
// Guard that the type object matches.
// Guard that the object group matches.
Register typeReg = regs.takeAny();
masm.loadPtr(Address(BaselineStubReg, ICSetElem_Dense::offsetOfType()), typeReg);
masm.branchPtr(Assembler::NotEqual, Address(obj, JSObject::offsetOfType()), typeReg,
masm.loadPtr(Address(BaselineStubReg, ICSetElem_Dense::offsetOfGroup()), typeReg);
masm.branchPtr(Assembler::NotEqual, Address(obj, JSObject::offsetOfGroup()), typeReg,
&failureUnstow);
regs.add(typeReg);
@ -5361,10 +5361,10 @@ ICSetElemDenseAddCompiler::generateStubCode(MacroAssembler &masm)
regs = availableGeneralRegs(0);
regs.take(R0);
// Guard that the type object matches.
// Guard that the object group matches.
Register typeReg = regs.takeAny();
masm.loadPtr(Address(BaselineStubReg, ICSetElem_DenseAdd::offsetOfType()), typeReg);
masm.branchPtr(Assembler::NotEqual, Address(obj, JSObject::offsetOfType()), typeReg,
masm.loadPtr(Address(BaselineStubReg, ICSetElem_DenseAdd::offsetOfGroup()), typeReg);
masm.branchPtr(Assembler::NotEqual, Address(obj, JSObject::offsetOfGroup()), typeReg,
&failureUnstow);
regs.add(typeReg);
@ -6359,10 +6359,10 @@ UpdateExistingGenerationalDOMProxyStub(ICGetProp_Fallback *stub,
static bool
HasUnanalyzedNewScript(JSObject *obj)
{
if (obj->hasSingletonType())
if (obj->isSingleton())
return false;
types::TypeNewScript *newScript = obj->type()->newScript();
types::TypeNewScript *newScript = obj->group()->newScript();
if (newScript && !newScript->analyzed())
return true;
@ -6629,7 +6629,7 @@ TryAttachUnboxedGetPropStub(JSContext *cx, HandleScript script,
ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
ICGetProp_Unboxed::Compiler compiler(cx, monitorStub, obj->type(),
ICGetProp_Unboxed::Compiler compiler(cx, monitorStub, obj->group(),
property->offset + UnboxedPlainObject::offsetOfData(),
property->type);
ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
@ -7814,11 +7814,11 @@ ICGetProp_Unboxed::Compiler::generateStubCode(MacroAssembler &masm)
Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);
// Object and type guard.
// Object and group guard.
masm.branchTestObject(Assembler::NotEqual, R0, &failure);
Register object = masm.extractObject(R0, ExtractTemp0);
masm.loadPtr(Address(BaselineStubReg, ICGetProp_Unboxed::offsetOfType()), scratch);
masm.branchPtr(Assembler::NotEqual, Address(object, JSObject::offsetOfType()), scratch,
masm.loadPtr(Address(BaselineStubReg, ICGetProp_Unboxed::offsetOfGroup()), scratch);
masm.branchPtr(Assembler::NotEqual, Address(object, JSObject::offsetOfGroup()), scratch,
&failure);
// Get the address being read from.
@ -7932,7 +7932,7 @@ BaselineScript::noteAccessedGetter(uint32_t pcOffset)
// value property.
static bool
TryAttachSetValuePropStub(JSContext *cx, HandleScript script, jsbytecode *pc, ICSetProp_Fallback *stub,
HandleObject obj, HandleShape oldShape, HandleTypeObject oldType, uint32_t oldSlots,
HandleObject obj, HandleShape oldShape, HandleObjectGroup oldGroup, uint32_t oldSlots,
HandlePropertyName name, HandleId id, HandleValue rhs, bool *attached)
{
MOZ_ASSERT(!*attached);
@ -7955,7 +7955,7 @@ TryAttachSetValuePropStub(JSContext *cx, HandleScript script, jsbytecode *pc, IC
// script properties analysis hasn't been performed for yet, as there
// may be a shape change required here afterwards. Pretend we attached
// a stub, though, so the access is not marked as unoptimizable.
if (oldType->newScript() && !oldType->newScript()->analyzed()) {
if (oldGroup->newScript() && !oldGroup->newScript()->analyzed()) {
*attached = true;
return true;
}
@ -7965,7 +7965,7 @@ TryAttachSetValuePropStub(JSContext *cx, HandleScript script, jsbytecode *pc, IC
GetFixedOrDynamicSlotOffset(&obj->as<NativeObject>(), shape->slot(), &isFixedSlot, &offset);
JitSpew(JitSpew_BaselineIC, " Generating SetProp(NativeObject.ADD) stub");
ICSetPropNativeAddCompiler compiler(cx, obj, oldShape, oldType,
ICSetPropNativeAddCompiler compiler(cx, obj, oldShape, oldGroup,
chainDepth, isFixedSlot, offset);
ICUpdatedStub *newStub = compiler.getStub(compiler.getStubSpace(script));
if (!newStub)
@ -8109,7 +8109,7 @@ TryAttachUnboxedSetPropStub(JSContext *cx, HandleScript script,
if (!property)
return true;
ICSetProp_Unboxed::Compiler compiler(cx, obj->type(),
ICSetProp_Unboxed::Compiler compiler(cx, obj->group(),
property->offset + UnboxedPlainObject::offsetOfData(),
property->type);
ICUpdatedStub *newStub = compiler.getStub(compiler.getStubSpace(script));
@ -8154,7 +8154,7 @@ TryAttachTypedObjectSetPropStub(JSContext *cx, HandleScript script,
uint32_t fieldOffset = structDescr->fieldOffset(fieldIndex);
ICSetProp_TypedObject::Compiler compiler(cx, obj->lastProperty(), obj->type(), fieldOffset,
ICSetProp_TypedObject::Compiler compiler(cx, obj->lastProperty(), obj->group(), fieldOffset,
&fieldDescr->as<SimpleTypeDescr>());
ICUpdatedStub *newStub = compiler.getStub(compiler.getStubSpace(script));
if (!newStub)
@ -8201,8 +8201,8 @@ DoSetPropFallback(JSContext *cx, BaselineFrame *frame, ICSetProp_Fallback *stub_
if (!obj)
return false;
RootedShape oldShape(cx, obj->lastProperty());
RootedTypeObject oldType(cx, obj->getType(cx));
if (!oldType)
RootedObjectGroup oldGroup(cx, obj->getGroup(cx));
if (!oldGroup)
return false;
uint32_t oldSlots = obj->isNative() ? obj->as<NativeObject>().numDynamicSlots() : 0;
@ -8262,7 +8262,7 @@ DoSetPropFallback(JSContext *cx, BaselineFrame *frame, ICSetProp_Fallback *stub_
if (!attached &&
lhs.isObject() &&
!TryAttachSetValuePropStub(cx, script, pc, stub, obj, oldShape,
oldType, oldSlots, name, id, rhs, &attached))
oldGroup, oldSlots, name, id, rhs, &attached))
{
return false;
}
@ -8363,9 +8363,9 @@ ICSetProp_Native::Compiler::generateStubCode(MacroAssembler &masm)
masm.loadPtr(Address(BaselineStubReg, ICSetProp_Native::offsetOfShape()), scratch);
masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);
// Guard that the type object matches.
masm.loadPtr(Address(BaselineStubReg, ICSetProp_Native::offsetOfType()), scratch);
masm.branchPtr(Assembler::NotEqual, Address(objReg, JSObject::offsetOfType()), scratch,
// Guard that the object group matches.
masm.loadPtr(Address(BaselineStubReg, ICSetProp_Native::offsetOfGroup()), scratch);
masm.branchPtr(Assembler::NotEqual, Address(objReg, JSObject::offsetOfGroup()), scratch,
&failure);
// Stow both R0 and R1 (object and value).
@ -8459,9 +8459,9 @@ ICSetPropNativeAddCompiler::generateStubCode(MacroAssembler &masm)
masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAddImpl<0>::offsetOfShape(0)), scratch);
masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);
// Guard that the type object matches.
masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfType()), scratch);
masm.branchPtr(Assembler::NotEqual, Address(objReg, JSObject::offsetOfType()), scratch,
// Guard that the object group matches.
masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfGroup()), scratch);
masm.branchPtr(Assembler::NotEqual, Address(objReg, JSObject::offsetOfGroup()), scratch,
&failure);
// Stow both R0 and R1 (object and value).
@ -8500,29 +8500,29 @@ ICSetPropNativeAddCompiler::generateStubCode(MacroAssembler &masm)
masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfNewShape()), scratch);
masm.storePtr(scratch, shapeAddr);
// Try to change the object's type.
Label noTypeChange;
// Try to change the object's group.
Label noGroupChange;
// Check if the cache has a new type to change to.
masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfNewType()), scratch);
masm.branchTestPtr(Assembler::Zero, scratch, scratch, &noTypeChange);
// Check if the cache has a new group to change to.
masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfNewGroup()), scratch);
masm.branchTestPtr(Assembler::Zero, scratch, scratch, &noGroupChange);
// Check if the old type still has a newScript.
masm.loadPtr(Address(objReg, JSObject::offsetOfType()), scratch);
// Check if the old group still has a newScript.
masm.loadPtr(Address(objReg, JSObject::offsetOfGroup()), scratch);
masm.branchPtr(Assembler::Equal,
Address(scratch, types::TypeObject::offsetOfAddendum()),
Address(scratch, types::ObjectGroup::offsetOfAddendum()),
ImmWord(0),
&noTypeChange);
&noGroupChange);
// Reload the new type from the cache.
masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfNewType()), scratch);
// Reload the new group from the cache.
masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfNewGroup()), scratch);
// Change the object's type.
Address typeAddr(objReg, JSObject::offsetOfType());
EmitPreBarrier(masm, typeAddr, MIRType_TypeObject);
masm.storePtr(scratch, typeAddr);
// Change the object's group.
Address groupAddr(objReg, JSObject::offsetOfGroup());
EmitPreBarrier(masm, groupAddr, MIRType_ObjectGroup);
masm.storePtr(scratch, groupAddr);
masm.bind(&noTypeChange);
masm.bind(&noGroupChange);
Register holderReg;
regs.add(R0);
@ -8573,10 +8573,10 @@ ICSetProp_Unboxed::Compiler::generateStubCode(MacroAssembler &masm)
GeneralRegisterSet regs(availableGeneralRegs(2));
Register scratch = regs.takeAny();
// Unbox and type guard.
// Unbox and group guard.
Register object = masm.extractObject(R0, ExtractTemp0);
masm.loadPtr(Address(BaselineStubReg, ICSetProp_Unboxed::offsetOfType()), scratch);
masm.branchPtr(Assembler::NotEqual, Address(object, JSObject::offsetOfType()), scratch,
masm.loadPtr(Address(BaselineStubReg, ICSetProp_Unboxed::offsetOfGroup()), scratch);
masm.branchPtr(Assembler::NotEqual, Address(object, JSObject::offsetOfGroup()), scratch,
&failure);
if (needsUpdateStubs()) {
@ -8649,9 +8649,9 @@ ICSetProp_TypedObject::Compiler::generateStubCode(MacroAssembler &masm)
masm.loadPtr(Address(BaselineStubReg, ICSetProp_TypedObject::offsetOfShape()), scratch);
masm.branchTestObjShape(Assembler::NotEqual, object, scratch, &failure);
// Guard that the type object matches.
masm.loadPtr(Address(BaselineStubReg, ICSetProp_TypedObject::offsetOfType()), scratch);
masm.branchPtr(Assembler::NotEqual, Address(object, JSObject::offsetOfType()), scratch,
// Guard that the object group matches.
masm.loadPtr(Address(BaselineStubReg, ICSetProp_TypedObject::offsetOfGroup()), scratch);
masm.branchPtr(Assembler::NotEqual, Address(object, JSObject::offsetOfGroup()), scratch,
&failure);
if (needsUpdateStubs()) {
@ -9039,10 +9039,10 @@ GetTemplateObjectForNative(JSContext *cx, HandleScript script, jsbytecode *pc,
if (!res)
return false;
types::TypeObject *type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
if (!type)
types::ObjectGroup *group = types::TypeScript::InitGroup(cx, script, pc, JSProto_Array);
if (!group)
return false;
res->setType(type);
res->setGroup(group);
return true;
}
@ -9051,21 +9051,21 @@ GetTemplateObjectForNative(JSContext *cx, HandleScript script, jsbytecode *pc,
if (!res)
return false;
types::TypeObject *type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
if (!type)
types::ObjectGroup *group = types::TypeScript::InitGroup(cx, script, pc, JSProto_Array);
if (!group)
return false;
res->setType(type);
res->setGroup(group);
return true;
}
if (native == js::array_concat) {
if (args.thisv().isObject() && args.thisv().toObject().is<ArrayObject>() &&
!args.thisv().toObject().hasSingletonType())
!args.thisv().toObject().isSingleton())
{
res.set(NewDenseEmptyArray(cx, args.thisv().toObject().getProto(), TenuredObject));
if (!res)
return false;
res->setType(args.thisv().toObject().type());
res->setGroup(args.thisv().toObject().group());
return true;
}
}
@ -9075,10 +9075,10 @@ GetTemplateObjectForNative(JSContext *cx, HandleScript script, jsbytecode *pc,
if (!res)
return false;
types::TypeObject *type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
if (!type)
types::ObjectGroup *group = types::TypeScript::InitGroup(cx, script, pc, JSProto_Array);
if (!group)
return false;
res->setType(type);
res->setGroup(group);
return true;
}
@ -9156,9 +9156,9 @@ IsOptimizableCallStringSplit(Value callee, Value thisv, int argc, Value *args)
static bool
TryAttachCallStub(JSContext *cx, ICCall_Fallback *stub, HandleScript script, jsbytecode *pc,
JSOp op, uint32_t argc, Value *vp, bool constructing, bool isSpread,
bool useNewType)
bool createSingleton)
{
if (useNewType || op == JSOP_EVAL || op == JSOP_STRICTEVAL)
if (createSingleton || op == JSOP_EVAL || op == JSOP_STRICTEVAL)
return true;
if (stub->numOptimizedStubs() >= ICCall_Fallback::MAX_OPTIMIZED_STUBS) {
@ -9189,7 +9189,7 @@ TryAttachCallStub(JSContext *cx, ICCall_Fallback *stub, HandleScript script, jsb
if (obj->is<ProxyObject>())
return true;
if (JSNative hook = constructing ? obj->constructHook() : obj->callHook()) {
if (op != JSOP_FUNAPPLY && !isSpread && !useNewType) {
if (op != JSOP_FUNAPPLY && !isSpread && !createSingleton) {
RootedObject templateObject(cx);
CallArgs args = CallArgsFromVp(argc, vp);
if (!GetTemplateObjectForClassHook(cx, hook, args, &templateObject))
@ -9275,7 +9275,7 @@ TryAttachCallStub(JSContext *cx, ICCall_Fallback *stub, HandleScript script, jsb
// stub. After the analysis is performed, CreateThisForFunction may
// start returning objects with a different type, and the Ion
// compiler might get confused.
types::TypeNewScript *newScript = templateObject->type()->newScript();
types::TypeNewScript *newScript = templateObject->group()->newScript();
if (newScript && !newScript->analyzed()) {
// Clear the object just created from the preliminary objects
// on the TypeNewScript, as it will not be used or filled in by
@ -9373,15 +9373,15 @@ CopyArray(JSContext *cx, HandleArrayObject obj, MutableHandleValue result)
uint32_t length = obj->as<ArrayObject>().length();
MOZ_ASSERT(obj->getDenseInitializedLength() == length);
RootedTypeObject type(cx, obj->getType(cx));
if (!type)
RootedObjectGroup group(cx, obj->getGroup(cx));
if (!group)
return false;
RootedArrayObject newObj(cx, NewDenseFullyAllocatedArray(cx, length, nullptr, TenuredObject));
if (!newObj)
return false;
newObj->setType(type);
newObj->setGroup(group);
newObj->setDenseInitializedLength(length);
newObj->initDenseElements(0, obj->getDenseElements(), length);
result.setObject(*newObj);
@ -9473,12 +9473,12 @@ DoCallFallback(JSContext *cx, BaselineFrame *frame, ICCall_Fallback *stub_, uint
return false;
}
// Compute construcing and useNewType flags.
// Compute construcing and useNewGroup flags.
bool constructing = (op == JSOP_NEW);
bool newType = types::UseNewType(cx, script, pc);
bool createSingleton = types::UseSingletonForNewObject(cx, script, pc);
// Try attaching a call stub.
if (!TryAttachCallStub(cx, stub, script, pc, op, argc, vp, constructing, false, newType))
if (!TryAttachCallStub(cx, stub, script, pc, op, argc, vp, constructing, false, createSingleton))
return false;
if (!MaybeCloneFunctionAtCallsite(cx, &callee, script, pc))
@ -11470,9 +11470,9 @@ ICTypeMonitor_SingleObject::ICTypeMonitor_SingleObject(JitCode *stubCode, Handle
obj_(obj)
{ }
ICTypeMonitor_TypeObject::ICTypeMonitor_TypeObject(JitCode *stubCode, HandleTypeObject type)
: ICStub(TypeMonitor_TypeObject, stubCode),
type_(type)
ICTypeMonitor_ObjectGroup::ICTypeMonitor_ObjectGroup(JitCode *stubCode, HandleObjectGroup group)
: ICStub(TypeMonitor_ObjectGroup, stubCode),
group_(group)
{ }
ICTypeUpdate_SingleObject::ICTypeUpdate_SingleObject(JitCode *stubCode, HandleObject obj)
@ -11480,9 +11480,9 @@ ICTypeUpdate_SingleObject::ICTypeUpdate_SingleObject(JitCode *stubCode, HandleOb
obj_(obj)
{ }
ICTypeUpdate_TypeObject::ICTypeUpdate_TypeObject(JitCode *stubCode, HandleTypeObject type)
: ICStub(TypeUpdate_TypeObject, stubCode),
type_(type)
ICTypeUpdate_ObjectGroup::ICTypeUpdate_ObjectGroup(JitCode *stubCode, HandleObjectGroup group)
: ICStub(TypeUpdate_ObjectGroup, stubCode),
group_(group)
{ }
ICGetElemNativeStub::ICGetElemNativeStub(ICStub::Kind kind, JitCode *stubCode,
@ -11591,16 +11591,16 @@ ICGetElem_Arguments::Clone(JSContext *, ICStubSpace *space, ICStub *firstMonitor
return New(space, other.jitCode(), firstMonitorStub, other.which());
}
ICSetElem_Dense::ICSetElem_Dense(JitCode *stubCode, HandleShape shape, HandleTypeObject type)
ICSetElem_Dense::ICSetElem_Dense(JitCode *stubCode, HandleShape shape, HandleObjectGroup group)
: ICUpdatedStub(SetElem_Dense, stubCode),
shape_(shape),
type_(type)
group_(group)
{ }
ICSetElem_DenseAdd::ICSetElem_DenseAdd(JitCode *stubCode, types::TypeObject *type,
ICSetElem_DenseAdd::ICSetElem_DenseAdd(JitCode *stubCode, types::ObjectGroup *group,
size_t protoChainDepth)
: ICUpdatedStub(SetElem_DenseAdd, stubCode),
type_(type)
group_(group)
{
MOZ_ASSERT(protoChainDepth <= MAX_PROTO_CHAIN_DEPTH);
extra_ = protoChainDepth;
@ -11610,11 +11610,11 @@ template <size_t ProtoChainDepth>
ICUpdatedStub *
ICSetElemDenseAddCompiler::getStubSpecific(ICStubSpace *space, const AutoShapeVector *shapes)
{
RootedTypeObject objType(cx, obj_->getType(cx));
if (!objType)
RootedObjectGroup group(cx, obj_->getGroup(cx));
if (!group)
return nullptr;
Rooted<JitCode *> stubCode(cx, getStubCode());
return ICSetElem_DenseAddImpl<ProtoChainDepth>::New(space, stubCode, objType, shapes);
return ICSetElem_DenseAddImpl<ProtoChainDepth>::New(space, stubCode, group, shapes);
}
ICSetElem_TypedArray::ICSetElem_TypedArray(JitCode *stubCode, HandleShape shape, Scalar::Type type,
@ -11803,10 +11803,10 @@ ICGetProp_CallNativePrototype::Clone(JSContext *cx, ICStubSpace *space, ICStub *
holderShape, getter, other.pcOffset_);
}
ICSetProp_Native::ICSetProp_Native(JitCode *stubCode, HandleTypeObject type, HandleShape shape,
ICSetProp_Native::ICSetProp_Native(JitCode *stubCode, HandleObjectGroup group, HandleShape shape,
uint32_t offset)
: ICUpdatedStub(SetProp_Native, stubCode),
type_(type),
group_(group),
shape_(shape),
offset_(offset)
{ }
@ -11814,26 +11814,26 @@ ICSetProp_Native::ICSetProp_Native(JitCode *stubCode, HandleTypeObject type, Han
ICSetProp_Native *
ICSetProp_Native::Compiler::getStub(ICStubSpace *space)
{
RootedTypeObject type(cx, obj_->getType(cx));
if (!type)
RootedObjectGroup group(cx, obj_->getGroup(cx));
if (!group)
return nullptr;
RootedShape shape(cx, obj_->lastProperty());
ICSetProp_Native *stub = ICSetProp_Native::New(space, getStubCode(), type, shape, offset_);
ICSetProp_Native *stub = ICSetProp_Native::New(space, getStubCode(), group, shape, offset_);
if (!stub || !stub->initUpdatingChain(cx, space))
return nullptr;
return stub;
}
ICSetProp_NativeAdd::ICSetProp_NativeAdd(JitCode *stubCode, HandleTypeObject type,
ICSetProp_NativeAdd::ICSetProp_NativeAdd(JitCode *stubCode, HandleObjectGroup group,
size_t protoChainDepth,
HandleShape newShape,
HandleTypeObject newType,
HandleObjectGroup newGroup,
uint32_t offset)
: ICUpdatedStub(SetProp_NativeAdd, stubCode),
type_(type),
group_(group),
newShape_(newShape),
newType_(newType),
newGroup_(newGroup),
offset_(offset)
{
MOZ_ASSERT(protoChainDepth <= MAX_PROTO_CHAIN_DEPTH);
@ -11842,12 +11842,12 @@ ICSetProp_NativeAdd::ICSetProp_NativeAdd(JitCode *stubCode, HandleTypeObject typ
template <size_t ProtoChainDepth>
ICSetProp_NativeAddImpl<ProtoChainDepth>::ICSetProp_NativeAddImpl(JitCode *stubCode,
HandleTypeObject type,
HandleObjectGroup group,
const AutoShapeVector *shapes,
HandleShape newShape,
HandleTypeObject newType,
HandleObjectGroup newGroup,
uint32_t offset)
: ICSetProp_NativeAdd(stubCode, type, ProtoChainDepth, newShape, newType, offset)
: ICSetProp_NativeAdd(stubCode, group, ProtoChainDepth, newShape, newGroup, offset)
{
MOZ_ASSERT(shapes->length() == NumShapes);
for (size_t i = 0; i < NumShapes; i++)
@ -11856,14 +11856,14 @@ ICSetProp_NativeAddImpl<ProtoChainDepth>::ICSetProp_NativeAddImpl(JitCode *stubC
ICSetPropNativeAddCompiler::ICSetPropNativeAddCompiler(JSContext *cx, HandleObject obj,
HandleShape oldShape,
HandleTypeObject oldType,
HandleObjectGroup oldGroup,
size_t protoChainDepth,
bool isFixedSlot,
uint32_t offset)
: ICStubCompiler(cx, ICStub::SetProp_NativeAdd),
obj_(cx, obj),
oldShape_(cx, oldShape),
oldType_(cx, oldType),
oldGroup_(cx, oldGroup),
protoChainDepth_(protoChainDepth),
isFixedSlot_(isFixedSlot),
offset_(offset)

Просмотреть файл

@ -142,8 +142,8 @@ namespace jit {
// between stubs on an IC, but instead are kept track of on a per-stub basis.
//
// This is because the main stubs for the operation will each identify a potentially
// different TypeObject to update. New input types must be tracked on a typeobject-to-
// typeobject basis.
// different ObjectGroup to update. New input types must be tracked on a group-to-
// group basis.
//
// Type-update ICs cannot be called in tail position (they must return to the
// the stub that called them so that the stub may continue to perform its original
@ -336,12 +336,12 @@ class ICEntry
\
_(TypeMonitor_Fallback) \
_(TypeMonitor_SingleObject) \
_(TypeMonitor_TypeObject) \
_(TypeMonitor_ObjectGroup) \
_(TypeMonitor_PrimitiveSet) \
\
_(TypeUpdate_Fallback) \
_(TypeUpdate_SingleObject) \
_(TypeUpdate_TypeObject) \
_(TypeUpdate_ObjectGroup) \
_(TypeUpdate_PrimitiveSet) \
\
_(This_Fallback) \
@ -1562,44 +1562,44 @@ class ICTypeMonitor_SingleObject : public ICStub
};
};
class ICTypeMonitor_TypeObject : public ICStub
class ICTypeMonitor_ObjectGroup : public ICStub
{
friend class ICStubSpace;
HeapPtrTypeObject type_;
HeapPtrObjectGroup group_;
ICTypeMonitor_TypeObject(JitCode *stubCode, HandleTypeObject type);
ICTypeMonitor_ObjectGroup(JitCode *stubCode, HandleObjectGroup group);
public:
static inline ICTypeMonitor_TypeObject *New(
ICStubSpace *space, JitCode *code, HandleTypeObject type)
static inline ICTypeMonitor_ObjectGroup *New(
ICStubSpace *space, JitCode *code, HandleObjectGroup group)
{
if (!code)
return nullptr;
return space->allocate<ICTypeMonitor_TypeObject>(code, type);
return space->allocate<ICTypeMonitor_ObjectGroup>(code, group);
}
HeapPtrTypeObject &type() {
return type_;
HeapPtrObjectGroup &group() {
return group_;
}
static size_t offsetOfType() {
return offsetof(ICTypeMonitor_TypeObject, type_);
static size_t offsetOfGroup() {
return offsetof(ICTypeMonitor_ObjectGroup, group_);
}
class Compiler : public ICStubCompiler {
protected:
HandleTypeObject type_;
HandleObjectGroup group_;
bool generateStubCode(MacroAssembler &masm);
public:
Compiler(JSContext *cx, HandleTypeObject type)
: ICStubCompiler(cx, TypeMonitor_TypeObject),
type_(type)
Compiler(JSContext *cx, HandleObjectGroup group)
: ICStubCompiler(cx, TypeMonitor_ObjectGroup),
group_(group)
{ }
ICTypeMonitor_TypeObject *getStub(ICStubSpace *space) {
return ICTypeMonitor_TypeObject::New(space, getStubCode(), type_);
ICTypeMonitor_ObjectGroup *getStub(ICStubSpace *space) {
return ICTypeMonitor_ObjectGroup::New(space, getStubCode(), group_);
}
};
};
@ -1725,45 +1725,45 @@ class ICTypeUpdate_SingleObject : public ICStub
};
};
// Type update stub to handle a single TypeObject.
class ICTypeUpdate_TypeObject : public ICStub
// Type update stub to handle a single ObjectGroup.
class ICTypeUpdate_ObjectGroup : public ICStub
{
friend class ICStubSpace;
HeapPtrTypeObject type_;
HeapPtrObjectGroup group_;
ICTypeUpdate_TypeObject(JitCode *stubCode, HandleTypeObject type);
ICTypeUpdate_ObjectGroup(JitCode *stubCode, HandleObjectGroup group);
public:
static inline ICTypeUpdate_TypeObject *New(ICStubSpace *space, JitCode *code,
HandleTypeObject type)
static inline ICTypeUpdate_ObjectGroup *New(ICStubSpace *space, JitCode *code,
HandleObjectGroup group)
{
if (!code)
return nullptr;
return space->allocate<ICTypeUpdate_TypeObject>(code, type);
return space->allocate<ICTypeUpdate_ObjectGroup>(code, group);
}
HeapPtrTypeObject &type() {
return type_;
HeapPtrObjectGroup &group() {
return group_;
}
static size_t offsetOfType() {
return offsetof(ICTypeUpdate_TypeObject, type_);
static size_t offsetOfGroup() {
return offsetof(ICTypeUpdate_ObjectGroup, group_);
}
class Compiler : public ICStubCompiler {
protected:
HandleTypeObject type_;
HandleObjectGroup group_;
bool generateStubCode(MacroAssembler &masm);
public:
Compiler(JSContext *cx, HandleTypeObject type)
: ICStubCompiler(cx, TypeUpdate_TypeObject),
type_(type)
Compiler(JSContext *cx, HandleObjectGroup group)
: ICStubCompiler(cx, TypeUpdate_ObjectGroup),
group_(group)
{ }
ICTypeUpdate_TypeObject *getStub(ICStubSpace *space) {
return ICTypeUpdate_TypeObject::New(space, getStubCode(), type_);
ICTypeUpdate_ObjectGroup *getStub(ICStubSpace *space) {
return ICTypeUpdate_ObjectGroup::New(space, getStubCode(), group_);
}
};
};
@ -3512,51 +3512,51 @@ class ICSetElem_Dense : public ICUpdatedStub
friend class ICStubSpace;
HeapPtrShape shape_;
HeapPtrTypeObject type_;
HeapPtrObjectGroup group_;
ICSetElem_Dense(JitCode *stubCode, HandleShape shape, HandleTypeObject type);
ICSetElem_Dense(JitCode *stubCode, HandleShape shape, HandleObjectGroup group);
public:
static inline ICSetElem_Dense *New(ICStubSpace *space, JitCode *code, HandleShape shape,
HandleTypeObject type) {
HandleObjectGroup group) {
if (!code)
return nullptr;
return space->allocate<ICSetElem_Dense>(code, shape, type);
return space->allocate<ICSetElem_Dense>(code, shape, group);
}
static size_t offsetOfShape() {
return offsetof(ICSetElem_Dense, shape_);
}
static size_t offsetOfType() {
return offsetof(ICSetElem_Dense, type_);
static size_t offsetOfGroup() {
return offsetof(ICSetElem_Dense, group_);
}
HeapPtrShape &shape() {
return shape_;
}
HeapPtrTypeObject &type() {
return type_;
HeapPtrObjectGroup &group() {
return group_;
}
class Compiler : public ICStubCompiler {
RootedShape shape_;
// Compiler is only live on stack during compilation, it should
// outlive any RootedTypeObject it's passed. So it can just
// outlive any RootedObjectGroup it's passed. So it can just
// use the handle.
HandleTypeObject type_;
HandleObjectGroup group_;
bool generateStubCode(MacroAssembler &masm);
public:
Compiler(JSContext *cx, Shape *shape, HandleTypeObject type)
Compiler(JSContext *cx, Shape *shape, HandleObjectGroup group)
: ICStubCompiler(cx, ICStub::SetElem_Dense),
shape_(cx, shape),
type_(type)
group_(group)
{}
ICUpdatedStub *getStub(ICStubSpace *space) {
ICSetElem_Dense *stub = ICSetElem_Dense::New(space, getStubCode(), shape_, type_);
ICSetElem_Dense *stub = ICSetElem_Dense::New(space, getStubCode(), shape_, group_);
if (!stub || !stub->initUpdatingChain(cx, space))
return nullptr;
return stub;
@ -3574,17 +3574,17 @@ class ICSetElem_DenseAdd : public ICUpdatedStub
static const size_t MAX_PROTO_CHAIN_DEPTH = 4;
protected:
HeapPtrTypeObject type_;
HeapPtrObjectGroup group_;
ICSetElem_DenseAdd(JitCode *stubCode, types::TypeObject *type, size_t protoChainDepth);
ICSetElem_DenseAdd(JitCode *stubCode, types::ObjectGroup *group, size_t protoChainDepth);
public:
static size_t offsetOfType() {
return offsetof(ICSetElem_DenseAdd, type_);
static size_t offsetOfGroup() {
return offsetof(ICSetElem_DenseAdd, group_);
}
HeapPtrTypeObject &type() {
return type_;
HeapPtrObjectGroup &group() {
return group_;
}
size_t protoChainDepth() const {
MOZ_ASSERT(extra_ <= MAX_PROTO_CHAIN_DEPTH);
@ -3611,9 +3611,9 @@ class ICSetElem_DenseAddImpl : public ICSetElem_DenseAdd
static const size_t NumShapes = ProtoChainDepth + 1;
mozilla::Array<HeapPtrShape, NumShapes> shapes_;
ICSetElem_DenseAddImpl(JitCode *stubCode, types::TypeObject *type,
ICSetElem_DenseAddImpl(JitCode *stubCode, types::ObjectGroup *group,
const AutoShapeVector *shapes)
: ICSetElem_DenseAdd(stubCode, type, ProtoChainDepth)
: ICSetElem_DenseAdd(stubCode, group, ProtoChainDepth)
{
MOZ_ASSERT(shapes->length() == NumShapes);
for (size_t i = 0; i < NumShapes; i++)
@ -3622,12 +3622,12 @@ class ICSetElem_DenseAddImpl : public ICSetElem_DenseAdd
public:
static inline ICSetElem_DenseAddImpl *New(ICStubSpace *space, JitCode *code,
types::TypeObject *type,
types::ObjectGroup *group,
const AutoShapeVector *shapes)
{
if (!code)
return nullptr;
return space->allocate<ICSetElem_DenseAddImpl<ProtoChainDepth> >(code, type, shapes);
return space->allocate<ICSetElem_DenseAddImpl<ProtoChainDepth> >(code, group, shapes);
}
void traceShapes(JSTracer *trc) {
@ -4529,33 +4529,33 @@ class ICGetProp_Unboxed : public ICMonitoredStub
{
friend class ICStubSpace;
HeapPtrTypeObject type_;
HeapPtrObjectGroup group_;
uint32_t fieldOffset_;
ICGetProp_Unboxed(JitCode *stubCode, ICStub *firstMonitorStub, HandleTypeObject type,
ICGetProp_Unboxed(JitCode *stubCode, ICStub *firstMonitorStub, HandleObjectGroup group,
uint32_t fieldOffset)
: ICMonitoredStub(ICStub::GetProp_Unboxed, stubCode, firstMonitorStub),
type_(type), fieldOffset_(fieldOffset)
group_(group), fieldOffset_(fieldOffset)
{
(void) fieldOffset_; // Silence clang warning
}
public:
static inline ICGetProp_Unboxed *New(ICStubSpace *space, JitCode *code,
ICStub *firstMonitorStub, HandleTypeObject shape,
ICStub *firstMonitorStub, HandleObjectGroup group,
uint32_t fieldOffset)
{
if (!code)
return nullptr;
return space->allocate<ICGetProp_Unboxed>(code, firstMonitorStub, shape, fieldOffset);
return space->allocate<ICGetProp_Unboxed>(code, firstMonitorStub, group, fieldOffset);
}
HeapPtrTypeObject &type() {
return type_;
HeapPtrObjectGroup &group() {
return group_;
}
static size_t offsetOfType() {
return offsetof(ICGetProp_Unboxed, type_);
static size_t offsetOfGroup() {
return offsetof(ICGetProp_Unboxed, group_);
}
static size_t offsetOfFieldOffset() {
return offsetof(ICGetProp_Unboxed, fieldOffset_);
@ -4564,7 +4564,7 @@ class ICGetProp_Unboxed : public ICMonitoredStub
class Compiler : public ICStubCompiler {
protected:
ICStub *firstMonitorStub_;
RootedTypeObject type_;
RootedObjectGroup group_;
uint32_t fieldOffset_;
JSValueType fieldType_;
@ -4576,17 +4576,17 @@ class ICGetProp_Unboxed : public ICMonitoredStub
public:
Compiler(JSContext *cx, ICStub *firstMonitorStub,
types::TypeObject *type, uint32_t fieldOffset, JSValueType fieldType)
types::ObjectGroup *group, uint32_t fieldOffset, JSValueType fieldType)
: ICStubCompiler(cx, ICStub::GetProp_Unboxed),
firstMonitorStub_(firstMonitorStub),
type_(cx, type),
group_(cx, group),
fieldOffset_(fieldOffset),
fieldType_(fieldType)
{}
ICStub *getStub(ICStubSpace *space) {
return ICGetProp_Unboxed::New(space, getStubCode(), firstMonitorStub_,
type_, fieldOffset_);
group_, fieldOffset_);
}
};
};
@ -5298,22 +5298,22 @@ class ICSetProp_Native : public ICUpdatedStub
friend class ICStubSpace;
protected: // Protected to silence Clang warning.
HeapPtrTypeObject type_;
HeapPtrObjectGroup group_;
HeapPtrShape shape_;
uint32_t offset_;
ICSetProp_Native(JitCode *stubCode, HandleTypeObject type, HandleShape shape, uint32_t offset);
ICSetProp_Native(JitCode *stubCode, HandleObjectGroup group, HandleShape shape, uint32_t offset);
public:
static inline ICSetProp_Native *New(ICStubSpace *space, JitCode *code, HandleTypeObject type,
static inline ICSetProp_Native *New(ICStubSpace *space, JitCode *code, HandleObjectGroup group,
HandleShape shape, uint32_t offset)
{
if (!code)
return nullptr;
return space->allocate<ICSetProp_Native>(code, type, shape, offset);
return space->allocate<ICSetProp_Native>(code, group, shape, offset);
}
HeapPtrTypeObject &type() {
return type_;
HeapPtrObjectGroup &group() {
return group_;
}
HeapPtrShape &shape() {
return shape_;
@ -5324,8 +5324,8 @@ class ICSetProp_Native : public ICUpdatedStub
bool hasPreliminaryObject() const {
return extra_;
}
static size_t offsetOfType() {
return offsetof(ICSetProp_Native, type_);
static size_t offsetOfGroup() {
return offsetof(ICSetProp_Native, group_);
}
static size_t offsetOfShape() {
return offsetof(ICSetProp_Native, shape_);
@ -5367,26 +5367,26 @@ class ICSetProp_NativeAdd : public ICUpdatedStub
static const size_t MAX_PROTO_CHAIN_DEPTH = 4;
protected: // Protected to silence Clang warning.
HeapPtrTypeObject type_;
HeapPtrObjectGroup group_;
HeapPtrShape newShape_;
HeapPtrTypeObject newType_;
HeapPtrObjectGroup newGroup_;
uint32_t offset_;
ICSetProp_NativeAdd(JitCode *stubCode, HandleTypeObject type, size_t protoChainDepth,
HandleShape newShape, HandleTypeObject newType, uint32_t offset);
ICSetProp_NativeAdd(JitCode *stubCode, HandleObjectGroup group, size_t protoChainDepth,
HandleShape newShape, HandleObjectGroup newGroup, uint32_t offset);
public:
size_t protoChainDepth() const {
return extra_;
}
HeapPtrTypeObject &type() {
return type_;
HeapPtrObjectGroup &group() {
return group_;
}
HeapPtrShape &newShape() {
return newShape_;
}
HeapPtrTypeObject &newType() {
return newType_;
HeapPtrObjectGroup &newGroup() {
return newGroup_;
}
template <size_t ProtoChainDepth>
@ -5395,14 +5395,14 @@ class ICSetProp_NativeAdd : public ICUpdatedStub
return static_cast<ICSetProp_NativeAddImpl<ProtoChainDepth> *>(this);
}
static size_t offsetOfType() {
return offsetof(ICSetProp_NativeAdd, type_);
static size_t offsetOfGroup() {
return offsetof(ICSetProp_NativeAdd, group_);
}
static size_t offsetOfNewShape() {
return offsetof(ICSetProp_NativeAdd, newShape_);
}
static size_t offsetOfNewType() {
return offsetof(ICSetProp_NativeAdd, newType_);
static size_t offsetOfNewGroup() {
return offsetof(ICSetProp_NativeAdd, newGroup_);
}
static size_t offsetOfOffset() {
return offsetof(ICSetProp_NativeAdd, offset_);
@ -5417,20 +5417,20 @@ class ICSetProp_NativeAddImpl : public ICSetProp_NativeAdd
static const size_t NumShapes = ProtoChainDepth + 1;
mozilla::Array<HeapPtrShape, NumShapes> shapes_;
ICSetProp_NativeAddImpl(JitCode *stubCode, HandleTypeObject type,
ICSetProp_NativeAddImpl(JitCode *stubCode, HandleObjectGroup group,
const AutoShapeVector *shapes,
HandleShape newShape, HandleTypeObject newType, uint32_t offset);
HandleShape newShape, HandleObjectGroup newGroup, uint32_t offset);
public:
static inline ICSetProp_NativeAddImpl *New(
ICStubSpace *space, JitCode *code, HandleTypeObject type,
ICStubSpace *space, JitCode *code, HandleObjectGroup group,
const AutoShapeVector *shapes, HandleShape newShape,
HandleTypeObject newType, uint32_t offset)
HandleObjectGroup newGroup, uint32_t offset)
{
if (!code)
return nullptr;
return space->allocate<ICSetProp_NativeAddImpl<ProtoChainDepth> >(
code, type, shapes, newShape, newType, offset);
code, group, shapes, newShape, newGroup, offset);
}
void traceShapes(JSTracer *trc) {
@ -5447,7 +5447,7 @@ class ICSetPropNativeAddCompiler : public ICStubCompiler
{
RootedObject obj_;
RootedShape oldShape_;
RootedTypeObject oldType_;
RootedObjectGroup oldGroup_;
size_t protoChainDepth_;
bool isFixedSlot_;
uint32_t offset_;
@ -5462,26 +5462,26 @@ class ICSetPropNativeAddCompiler : public ICStubCompiler
public:
ICSetPropNativeAddCompiler(JSContext *cx, HandleObject obj,
HandleShape oldShape, HandleTypeObject oldType,
HandleShape oldShape, HandleObjectGroup oldGroup,
size_t protoChainDepth, bool isFixedSlot, uint32_t offset);
template <size_t ProtoChainDepth>
ICUpdatedStub *getStubSpecific(ICStubSpace *space, const AutoShapeVector *shapes)
{
RootedTypeObject newType(cx, obj_->getType(cx));
if (!newType)
RootedObjectGroup newGroup(cx, obj_->getGroup(cx));
if (!newGroup)
return nullptr;
// Only specify newType when the object's type changes due to the
// Only specify newGroup when the object's group changes due to the
// object becoming fully initialized per the acquired properties
// analysis.
if (newType == oldType_)
newType = nullptr;
if (newGroup == oldGroup_)
newGroup = nullptr;
RootedShape newShape(cx, obj_->lastProperty());
return ICSetProp_NativeAddImpl<ProtoChainDepth>::New(
space, getStubCode(), oldType_, shapes, newShape, newType, offset_);
space, getStubCode(), oldGroup_, shapes, newShape, newGroup, offset_);
}
ICUpdatedStub *getStub(ICStubSpace *space);
@ -5491,12 +5491,12 @@ class ICSetProp_Unboxed : public ICUpdatedStub
{
friend class ICStubSpace;
HeapPtrTypeObject type_;
HeapPtrObjectGroup group_;
uint32_t fieldOffset_;
ICSetProp_Unboxed(JitCode *stubCode, HandleTypeObject type, uint32_t fieldOffset)
ICSetProp_Unboxed(JitCode *stubCode, HandleObjectGroup group, uint32_t fieldOffset)
: ICUpdatedStub(ICStub::SetProp_Unboxed, stubCode),
type_(type),
group_(group),
fieldOffset_(fieldOffset)
{
(void) fieldOffset_; // Silence clang warning
@ -5504,19 +5504,19 @@ class ICSetProp_Unboxed : public ICUpdatedStub
public:
static inline ICSetProp_Unboxed *New(ICStubSpace *space, JitCode *code,
HandleTypeObject type, uint32_t fieldOffset)
HandleObjectGroup group, uint32_t fieldOffset)
{
if (!code)
return nullptr;
return space->allocate<ICSetProp_Unboxed>(code, type, fieldOffset);
return space->allocate<ICSetProp_Unboxed>(code, group, fieldOffset);
}
HeapPtrTypeObject &type() {
return type_;
HeapPtrObjectGroup &group() {
return group_;
}
static size_t offsetOfType() {
return offsetof(ICSetProp_Unboxed, type_);
static size_t offsetOfGroup() {
return offsetof(ICSetProp_Unboxed, group_);
}
static size_t offsetOfFieldOffset() {
return offsetof(ICSetProp_Unboxed, fieldOffset_);
@ -5524,7 +5524,7 @@ class ICSetProp_Unboxed : public ICUpdatedStub
class Compiler : public ICStubCompiler {
protected:
RootedTypeObject type_;
RootedObjectGroup group_;
uint32_t fieldOffset_;
JSValueType fieldType_;
@ -5536,17 +5536,17 @@ class ICSetProp_Unboxed : public ICUpdatedStub
}
public:
Compiler(JSContext *cx, types::TypeObject *type, uint32_t fieldOffset,
Compiler(JSContext *cx, types::ObjectGroup *group, uint32_t fieldOffset,
JSValueType fieldType)
: ICStubCompiler(cx, ICStub::SetProp_Unboxed),
type_(cx, type),
group_(cx, group),
fieldOffset_(fieldOffset),
fieldType_(fieldType)
{}
ICUpdatedStub *getStub(ICStubSpace *space) {
ICUpdatedStub *stub = ICSetProp_Unboxed::New(space, getStubCode(),
type_, fieldOffset_);
group_, fieldOffset_);
if (!stub || !stub->initUpdatingChain(cx, space))
return nullptr;
return stub;
@ -5563,15 +5563,15 @@ class ICSetProp_TypedObject : public ICUpdatedStub
friend class ICStubSpace;
HeapPtrShape shape_;
HeapPtrTypeObject type_;
HeapPtrObjectGroup group_;
uint32_t fieldOffset_;
bool isObjectReference_;
ICSetProp_TypedObject(JitCode *stubCode, HandleShape shape, HandleTypeObject type,
ICSetProp_TypedObject(JitCode *stubCode, HandleShape shape, HandleObjectGroup group,
uint32_t fieldOffset, bool isObjectReference)
: ICUpdatedStub(ICStub::SetProp_TypedObject, stubCode),
shape_(shape),
type_(type),
group_(group),
fieldOffset_(fieldOffset),
isObjectReference_(isObjectReference)
{
@ -5580,20 +5580,20 @@ class ICSetProp_TypedObject : public ICUpdatedStub
public:
static inline ICSetProp_TypedObject *New(ICStubSpace *space, JitCode *code,
HandleShape shape, HandleTypeObject type,
HandleShape shape, HandleObjectGroup group,
uint32_t fieldOffset, bool isObjectReference)
{
if (!code)
return nullptr;
return space->allocate<ICSetProp_TypedObject>(code, shape, type,
return space->allocate<ICSetProp_TypedObject>(code, shape, group,
fieldOffset, isObjectReference);
}
HeapPtrShape &shape() {
return shape_;
}
HeapPtrTypeObject &type() {
return type_;
HeapPtrObjectGroup &group() {
return group_;
}
bool isObjectReference() {
return isObjectReference_;
@ -5602,8 +5602,8 @@ class ICSetProp_TypedObject : public ICUpdatedStub
static size_t offsetOfShape() {
return offsetof(ICSetProp_TypedObject, shape_);
}
static size_t offsetOfType() {
return offsetof(ICSetProp_TypedObject, type_);
static size_t offsetOfGroup() {
return offsetof(ICSetProp_TypedObject, group_);
}
static size_t offsetOfFieldOffset() {
return offsetof(ICSetProp_TypedObject, fieldOffset_);
@ -5612,7 +5612,7 @@ class ICSetProp_TypedObject : public ICUpdatedStub
class Compiler : public ICStubCompiler {
protected:
RootedShape shape_;
RootedTypeObject type_;
RootedObjectGroup group_;
uint32_t fieldOffset_;
TypedThingLayout layout_;
Rooted<SimpleTypeDescr *> fieldDescr_;
@ -5626,11 +5626,11 @@ class ICSetProp_TypedObject : public ICUpdatedStub
}
public:
Compiler(JSContext *cx, Shape *shape, types::TypeObject *type, uint32_t fieldOffset,
Compiler(JSContext *cx, Shape *shape, types::ObjectGroup *group, uint32_t fieldOffset,
SimpleTypeDescr *fieldDescr)
: ICStubCompiler(cx, ICStub::SetProp_TypedObject),
shape_(cx, shape),
type_(cx, type),
group_(cx, group),
fieldOffset_(fieldOffset),
layout_(GetTypedThingLayout(shape->getObjectClass())),
fieldDescr_(cx, fieldDescr)
@ -5640,7 +5640,7 @@ class ICSetProp_TypedObject : public ICUpdatedStub
bool isObjectReference =
fieldDescr_->is<ReferenceTypeDescr>() &&
fieldDescr_->as<ReferenceTypeDescr>().type() == ReferenceTypeDescr::TYPE_OBJECT;
ICUpdatedStub *stub = ICSetProp_TypedObject::New(space, getStubCode(), shape_, type_,
ICUpdatedStub *stub = ICSetProp_TypedObject::New(space, getStubCode(), shape_, group_,
fieldOffset_, isObjectReference);
if (!stub || !stub->initUpdatingChain(cx, space))
return nullptr;

Просмотреть файл

@ -82,13 +82,13 @@ SetElemICInspector::sawTypedArrayWrite() const
bool
BaselineInspector::maybeInfoForPropertyOp(jsbytecode *pc,
ShapeVector &nativeShapes,
TypeObjectVector &unboxedTypes)
ObjectGroupVector &unboxedGroups)
{
// Return lists of native shapes and unboxed objects seen by the baseline
// IC for the current op. Empty lists indicate no shapes/types are known,
// or there was an uncacheable access.
MOZ_ASSERT(nativeShapes.empty());
MOZ_ASSERT(unboxedTypes.empty());
MOZ_ASSERT(unboxedGroups.empty());
if (!hasBaselineScript())
return true;
@ -99,23 +99,23 @@ BaselineInspector::maybeInfoForPropertyOp(jsbytecode *pc,
ICStub *stub = entry.firstStub();
while (stub->next()) {
Shape *shape = nullptr;
types::TypeObject *type = nullptr;
types::ObjectGroup *group = nullptr;
if (stub->isGetProp_Native()) {
shape = stub->toGetProp_Native()->shape();
} else if (stub->isSetProp_Native()) {
shape = stub->toSetProp_Native()->shape();
} else if (stub->isGetProp_Unboxed()) {
type = stub->toGetProp_Unboxed()->type();
group = stub->toGetProp_Unboxed()->group();
} else if (stub->isSetProp_Unboxed()) {
type = stub->toSetProp_Unboxed()->type();
group = stub->toSetProp_Unboxed()->group();
} else {
nativeShapes.clear();
unboxedTypes.clear();
unboxedGroups.clear();
return true;
}
// Don't add the same shape/type twice (this can happen if there are
// multiple SetProp_Native stubs with different TypeObject's).
// Don't add the same shape/group twice (this can happen if there are
// multiple SetProp_Native stubs with different ObjectGroups).
if (shape) {
bool found = false;
for (size_t i = 0; i < nativeShapes.length(); i++) {
@ -128,13 +128,13 @@ BaselineInspector::maybeInfoForPropertyOp(jsbytecode *pc,
return false;
} else {
bool found = false;
for (size_t i = 0; i < unboxedTypes.length(); i++) {
if (unboxedTypes[i] == type) {
for (size_t i = 0; i < unboxedGroups.length(); i++) {
if (unboxedGroups[i] == group) {
found = true;
break;
}
}
if (!found && !unboxedTypes.append(type))
if (!found && !unboxedGroups.append(group))
return false;
}
@ -144,19 +144,19 @@ BaselineInspector::maybeInfoForPropertyOp(jsbytecode *pc,
if (stub->isGetProp_Fallback()) {
if (stub->toGetProp_Fallback()->hadUnoptimizableAccess()) {
nativeShapes.clear();
unboxedTypes.clear();
unboxedGroups.clear();
}
} else {
if (stub->toSetProp_Fallback()->hadUnoptimizableAccess()) {
nativeShapes.clear();
unboxedTypes.clear();
unboxedGroups.clear();
}
}
// Don't inline if there are more than 5 shapes/types.
if (nativeShapes.length() + unboxedTypes.length() > 5) {
// Don't inline if there are more than 5 shapes/groups.
if (nativeShapes.length() + unboxedGroups.length() > 5) {
nativeShapes.clear();
unboxedTypes.clear();
unboxedGroups.clear();
}
return true;

Просмотреть файл

@ -93,8 +93,10 @@ class BaselineInspector
public:
typedef Vector<Shape *, 4, JitAllocPolicy> ShapeVector;
typedef Vector<types::TypeObject *, 4, JitAllocPolicy> TypeObjectVector;
bool maybeInfoForPropertyOp(jsbytecode *pc, ShapeVector &nativeShapes, TypeObjectVector &unboxedTypes);
typedef Vector<types::ObjectGroup *, 4, JitAllocPolicy> ObjectGroupVector;
bool maybeInfoForPropertyOp(jsbytecode *pc,
ShapeVector &nativeShapes,
ObjectGroupVector &unboxedGroups);
SetElemICInspector setElemICInspector(jsbytecode *pc) {
return makeICInspector<SetElemICInspector>(pc, ICStub::SetElem_Fallback);

Просмотреть файл

@ -723,9 +723,9 @@ CodeGenerator::visitFunctionDispatch(LFunctionDispatch *lir)
for (size_t i = 0; i < casesWithFallback - 1; i++) {
MOZ_ASSERT(i < mir->numCases());
LBlock *target = skipTrivialBlocks(mir->getCaseBlock(i))->lir();
if (types::TypeObject *funcType = mir->getCaseTypeObject(i)) {
masm.branchPtr(Assembler::Equal, Address(input, JSObject::offsetOfType()),
ImmGCPtr(funcType), target->label());
if (types::ObjectGroup *funcGroup = mir->getCaseObjectGroup(i)) {
masm.branchPtr(Assembler::Equal, Address(input, JSObject::offsetOfGroup()),
ImmGCPtr(funcGroup), target->label());
} else {
JSFunction *func = mir->getCase(i);
masm.branchPtr(Assembler::Equal, input, ImmGCPtr(func), target->label());
@ -737,17 +737,17 @@ CodeGenerator::visitFunctionDispatch(LFunctionDispatch *lir)
}
void
CodeGenerator::visitTypeObjectDispatch(LTypeObjectDispatch *lir)
CodeGenerator::visitObjectGroupDispatch(LObjectGroupDispatch *lir)
{
MTypeObjectDispatch *mir = lir->mir();
MObjectGroupDispatch *mir = lir->mir();
Register input = ToRegister(lir->input());
Register temp = ToRegister(lir->temp());
// Hold the incoming TypeObject.
// Hold the incoming ObjectGroup.
masm.loadPtr(Address(input, JSObject::offsetOfType()), temp);
masm.loadPtr(Address(input, JSObject::offsetOfGroup()), temp);
// Compare TypeObjects.
// Compare ObjectGroups.
MacroAssembler::BranchGCPtr lastBranch;
LBlock *lastBlock = nullptr;
@ -764,8 +764,8 @@ CodeGenerator::visitTypeObjectDispatch(LTypeObjectDispatch *lir)
if (lastBranch.isInitialized())
lastBranch.emit(masm);
types::TypeObject *typeObj = propTable->getTypeObject(j);
lastBranch = MacroAssembler::BranchGCPtr(Assembler::Equal, temp, ImmGCPtr(typeObj),
types::ObjectGroup *group = propTable->getObjectGroup(j);
lastBranch = MacroAssembler::BranchGCPtr(Assembler::Equal, temp, ImmGCPtr(group),
target->label());
lastBlock = target;
found = true;
@ -1704,7 +1704,7 @@ CodeGenerator::visitLambdaArrow(LLambdaArrow *lir)
(ArgList(), ImmGCPtr(info.fun), scopeChain, thisv),
StoreRegisterTo(output));
MOZ_ASSERT(!info.useNewTypeForClone);
MOZ_ASSERT(!info.useSingletonForClone);
if (info.singletonType) {
// If the function has a singleton type, this instruction will only be
@ -2234,26 +2234,26 @@ CodeGenerator::emitGetPropertyPolymorphic(LInstruction *ins, Register obj, Regis
{
MGetPropertyPolymorphic *mir = ins->mirRaw()->toGetPropertyPolymorphic();
size_t total = mir->numUnboxedTypes() + mir->numShapes();
size_t total = mir->numUnboxedGroups() + mir->numShapes();
MOZ_ASSERT(total > 1);
bool typeInScratch = mir->numUnboxedTypes() > 1;
bool groupInScratch = mir->numUnboxedGroups() > 1;
bool shapeInScratch = mir->numShapes() > 1;
Label done;
for (size_t i = 0; i < total; i++) {
bool unboxedType = i < mir->numUnboxedTypes();
bool unboxedGroup = i < mir->numUnboxedGroups();
ImmGCPtr comparePtr = unboxedType
? ImmGCPtr(mir->unboxedType(i))
: ImmGCPtr(mir->objShape(i - mir->numUnboxedTypes()));
Address addr(obj, unboxedType ? JSObject::offsetOfType() : JSObject::offsetOfShape());
ImmGCPtr comparePtr = unboxedGroup
? ImmGCPtr(mir->unboxedGroup(i))
: ImmGCPtr(mir->objShape(i - mir->numUnboxedGroups()));
Address addr(obj, unboxedGroup ? JSObject::offsetOfGroup() : JSObject::offsetOfShape());
if ((i == 0 && typeInScratch) || (i == mir->numUnboxedTypes() && shapeInScratch))
if ((i == 0 && groupInScratch) || (i == mir->numUnboxedGroups() && shapeInScratch))
masm.loadPtr(addr, scratch);
bool inScratch = unboxedType ? typeInScratch : shapeInScratch;
bool inScratch = unboxedGroup ? groupInScratch : shapeInScratch;
Label next;
if (i == total - 1) {
@ -2268,14 +2268,14 @@ CodeGenerator::emitGetPropertyPolymorphic(LInstruction *ins, Register obj, Regis
masm.branchPtr(Assembler::NotEqual, addr, comparePtr, &next);
}
if (unboxedType) {
if (unboxedGroup) {
const UnboxedLayout::Property *property =
mir->unboxedType(i)->unboxedLayout().lookup(mir->name());
mir->unboxedGroup(i)->unboxedLayout().lookup(mir->name());
Address propertyAddr(obj, UnboxedPlainObject::offsetOfData() + property->offset);
masm.loadUnboxedProperty(propertyAddr, property->type, output);
} else {
Shape *shape = mir->shape(i - mir->numUnboxedTypes());
Shape *shape = mir->shape(i - mir->numUnboxedGroups());
if (shape->slot() < shape->numFixedSlots()) {
// Fixed slot.
masm.loadTypedOrValue(Address(obj, NativeObject::getFixedSlotOffset(shape->slot())),
@ -2321,25 +2321,25 @@ CodeGenerator::emitSetPropertyPolymorphic(LInstruction *ins, Register obj, Regis
{
MSetPropertyPolymorphic *mir = ins->mirRaw()->toSetPropertyPolymorphic();
size_t total = mir->numUnboxedTypes() + mir->numShapes();
size_t total = mir->numUnboxedGroups() + mir->numShapes();
MOZ_ASSERT(total > 1);
bool typeInScratch = mir->numUnboxedTypes() > 1;
bool groupInScratch = mir->numUnboxedGroups() > 1;
bool shapeInScratch = mir->numShapes() > 1;
Label done;
for (size_t i = 0; i < total; i++) {
bool unboxedType = i < mir->numUnboxedTypes();
bool unboxedGroup = i < mir->numUnboxedGroups();
ImmGCPtr comparePtr = unboxedType
? ImmGCPtr(mir->unboxedType(i))
: ImmGCPtr(mir->objShape(i - mir->numUnboxedTypes()));
Address addr(obj, unboxedType ? JSObject::offsetOfType() : JSObject::offsetOfShape());
ImmGCPtr comparePtr = unboxedGroup
? ImmGCPtr(mir->unboxedGroup(i))
: ImmGCPtr(mir->objShape(i - mir->numUnboxedGroups()));
Address addr(obj, unboxedGroup ? JSObject::offsetOfGroup() : JSObject::offsetOfShape());
if ((i == 0 && typeInScratch) || (i == mir->numUnboxedTypes() && shapeInScratch))
if ((i == 0 && groupInScratch) || (i == mir->numUnboxedGroups() && shapeInScratch))
masm.loadPtr(addr, scratch);
bool inScratch = unboxedType ? typeInScratch : shapeInScratch;
bool inScratch = unboxedGroup ? groupInScratch : shapeInScratch;
Label next;
if (i == total - 1) {
@ -2354,9 +2354,9 @@ CodeGenerator::emitSetPropertyPolymorphic(LInstruction *ins, Register obj, Regis
masm.branchPtr(Assembler::NotEqual, addr, comparePtr, &next);
}
if (unboxedType) {
if (unboxedGroup) {
const UnboxedLayout::Property *property =
mir->unboxedType(i)->unboxedLayout().lookup(mir->name());
mir->unboxedGroup(i)->unboxedLayout().lookup(mir->name());
Address propertyAddr(obj, UnboxedPlainObject::offsetOfData() + property->offset);
if (property->type == JSVAL_TYPE_OBJECT)
@ -2368,7 +2368,7 @@ CodeGenerator::emitSetPropertyPolymorphic(LInstruction *ins, Register obj, Regis
masm.storeUnboxedProperty(propertyAddr, property->type, value, nullptr);
} else {
Shape *shape = mir->shape(i - mir->numUnboxedTypes());
Shape *shape = mir->shape(i - mir->numUnboxedGroups());
if (shape->slot() < shape->numFixedSlots()) {
// Fixed slot.
Address addr(obj, NativeObject::getFixedSlotOffset(shape->slot()));
@ -3676,11 +3676,11 @@ CodeGenerator::emitObjectOrStringResultChecks(LInstruction *lir, MDefinition *mi
masm.bind(&miss);
// Type set guards might miss when an object's type changes and its
// Type set guards might miss when an object's group changes and its
// properties become unknown, so check for this case.
masm.loadPtr(Address(output, JSObject::offsetOfType()), temp);
masm.loadPtr(Address(output, JSObject::offsetOfGroup()), temp);
masm.branchTestPtr(Assembler::NonZero,
Address(temp, types::TypeObject::offsetOfFlags()),
Address(temp, types::ObjectGroup::offsetOfFlags()),
Imm32(types::OBJECT_FLAG_UNKNOWN_PROPERTIES), &ok);
masm.assumeUnreachable("MIR instruction returned object with unexpected type");
@ -3753,14 +3753,14 @@ CodeGenerator::emitValueResultChecks(LInstruction *lir, MDefinition *mir)
masm.bind(&miss);
// Type set guards might miss when an object's type changes and its
// Type set guards might miss when an object's group changes and its
// properties become unknown, so check for this case.
Label realMiss;
masm.branchTestObject(Assembler::NotEqual, output, &realMiss);
Register payload = masm.extractObject(output, temp1);
masm.loadPtr(Address(payload, JSObject::offsetOfType()), temp1);
masm.loadPtr(Address(payload, JSObject::offsetOfGroup()), temp1);
masm.branchTestPtr(Assembler::NonZero,
Address(temp1, types::TypeObject::offsetOfFlags()),
Address(temp1, types::ObjectGroup::offsetOfFlags()),
Imm32(types::OBJECT_FLAG_UNKNOWN_PROPERTIES), &ok);
masm.bind(&realMiss);
@ -3934,7 +3934,7 @@ class OutOfLineNewArray : public OutOfLineCodeBase<CodeGenerator>
}
};
typedef ArrayObject *(*NewDenseArrayFn)(ExclusiveContext *, uint32_t, HandleTypeObject,
typedef ArrayObject *(*NewDenseArrayFn)(ExclusiveContext *, uint32_t, HandleObjectGroup,
AllocatingBehaviour);
static const VMFunction NewDenseArrayInfo = FunctionInfo<NewDenseArrayFn>(NewDenseArray);
@ -3947,11 +3947,11 @@ CodeGenerator::visitNewArrayCallVM(LNewArray *lir)
saveLive(lir);
JSObject *templateObject = lir->mir()->templateObject();
types::TypeObject *type =
templateObject->hasSingletonType() ? nullptr : templateObject->type();
types::ObjectGroup *group =
templateObject->isSingleton() ? nullptr : templateObject->group();
pushArg(Imm32(lir->mir()->allocatingBehaviour()));
pushArg(ImmGCPtr(type));
pushArg(ImmGCPtr(group));
pushArg(Imm32(lir->mir()->count()));
callVM(NewDenseArrayInfo, lir);
@ -4067,7 +4067,7 @@ CodeGenerator::visitNewArrayCopyOnWrite(LNewArrayCopyOnWrite *lir)
masm.bind(ool->rejoin());
}
typedef ArrayObject *(*ArrayConstructorOneArgFn)(JSContext *, HandleTypeObject, int32_t length);
typedef ArrayObject *(*ArrayConstructorOneArgFn)(JSContext *, HandleObjectGroup, int32_t length);
static const VMFunction ArrayConstructorOneArgInfo =
FunctionInfo<ArrayConstructorOneArgFn>(ArrayConstructorOneArg);
@ -4082,7 +4082,7 @@ CodeGenerator::visitNewArrayDynamicLength(LNewArrayDynamicLength *lir)
gc::InitialHeap initialHeap = lir->mir()->initialHeap();
OutOfLineCode *ool = oolCallVM(ArrayConstructorOneArgInfo, lir,
(ArgList(), ImmGCPtr(templateObject->type()), lengthReg),
(ArgList(), ImmGCPtr(templateObject->group()), lengthReg),
StoreRegisterTo(objReg));
size_t numSlots = gc::GetGCKindSlots(templateObject->asTenured().getAllocKind());
@ -4095,7 +4095,7 @@ CodeGenerator::visitNewArrayDynamicLength(LNewArrayDynamicLength *lir)
// use the template object and not allocate the elements, but it's more
// efficient to do a single big allocation than (repeatedly) reallocating
// the array later on when filling it.
if (!templateObject->hasSingletonType() && templateObject->length() <= inlineLength)
if (!templateObject->isSingleton() && templateObject->length() <= inlineLength)
masm.branch32(Assembler::Above, lengthReg, Imm32(templateObject->length()), ool->entry());
else
masm.jump(ool->entry());
@ -4333,12 +4333,12 @@ CodeGenerator::visitSimdUnbox(LSimdUnbox *lir)
Register temp = ToRegister(lir->temp());
Label bail;
// obj->type()
masm.loadPtr(Address(object, JSObject::offsetOfType()), temp);
// obj->group()
masm.loadPtr(Address(object, JSObject::offsetOfGroup()), temp);
// Guard that the object has the same representation as the one produced for
// SIMD value-type.
Address clasp(temp, types::TypeObject::offsetOfClasp());
Address clasp(temp, types::ObjectGroup::offsetOfClasp());
static_assert(!SimdTypeDescr::Opaque, "SIMD objects are transparent");
masm.branchPtr(Assembler::NotEqual, clasp, ImmPtr(&InlineTransparentTypedObject::class_),
&bail);
@ -4346,7 +4346,7 @@ CodeGenerator::visitSimdUnbox(LSimdUnbox *lir)
// obj->type()->typeDescr()
// The previous class pointer comparison implies that the addendumKind is
// Addendum_TypeDescr.
masm.loadPtr(Address(temp, types::TypeObject::offsetOfAddendum()), temp);
masm.loadPtr(Address(temp, types::ObjectGroup::offsetOfAddendum()), temp);
// Check for the /Kind/ reserved slot of the TypeDescr. This is an Int32
// Value which is equivalent to the object class check.
@ -4418,7 +4418,7 @@ CodeGenerator::visitNewDeclEnvObject(LNewDeclEnvObject *lir)
masm.bind(ool->rejoin());
}
typedef JSObject *(*NewCallObjectFn)(JSContext *, HandleShape, HandleTypeObject, uint32_t);
typedef JSObject *(*NewCallObjectFn)(JSContext *, HandleShape, HandleObjectGroup, uint32_t);
static const VMFunction NewCallObjectInfo =
FunctionInfo<NewCallObjectFn>(NewCallObject);
@ -4434,7 +4434,7 @@ CodeGenerator::visitNewCallObject(LNewCallObject *lir)
uint32_t lexicalBegin = script->bindings.aliasedBodyLevelLexicalBegin();
OutOfLineCode *ool = oolCallVM(NewCallObjectInfo, lir,
(ArgList(), ImmGCPtr(templateObj->lastProperty()),
ImmGCPtr(templateObj->type()),
ImmGCPtr(templateObj->group()),
Imm32(lexicalBegin)),
StoreRegisterTo(objReg));
@ -4640,7 +4640,7 @@ CodeGenerator::visitCreateThisWithTemplate(LCreateThisWithTemplate *lir)
JSObject *templateObject = lir->mir()->templateObject();
gc::AllocKind allocKind = templateObject->asTenured().getAllocKind();
gc::InitialHeap initialHeap = lir->mir()->initialHeap();
const js::Class *clasp = templateObject->type()->clasp();
const js::Class *clasp = templateObject->getClass();
Register objReg = ToRegister(lir->output());
Register tempReg = ToRegister(lir->temp());
@ -4809,8 +4809,8 @@ CodeGenerator::visitTypedObjectDescr(LTypedObjectDescr *lir)
Register obj = ToRegister(lir->object());
Register out = ToRegister(lir->output());
masm.loadPtr(Address(obj, JSObject::offsetOfType()), out);
masm.loadPtr(Address(out, types::TypeObject::offsetOfAddendum()), out);
masm.loadPtr(Address(obj, JSObject::offsetOfGroup()), out);
masm.loadPtr(Address(out, types::ObjectGroup::offsetOfAddendum()), out);
}
void
@ -6033,7 +6033,7 @@ CodeGenerator::visitFromCharCode(LFromCharCode *lir)
masm.bind(ool->rejoin());
}
typedef JSObject *(*StringSplitFn)(JSContext *, HandleTypeObject, HandleString, HandleString);
typedef JSObject *(*StringSplitFn)(JSContext *, HandleObjectGroup, HandleString, HandleString);
static const VMFunction StringSplitInfo = FunctionInfo<StringSplitFn>(js::str_split_string);
void
@ -6041,7 +6041,7 @@ CodeGenerator::visitStringSplit(LStringSplit *lir)
{
pushArg(ToRegister(lir->separator()));
pushArg(ToRegister(lir->string()));
pushArg(ImmGCPtr(lir->mir()->typeObject()));
pushArg(ImmGCPtr(lir->mir()->group()));
callVM(StringSplitInfo, lir);
}

Просмотреть файл

@ -92,7 +92,7 @@ class CodeGenerator : public CodeGeneratorSpecific
void visitTestOAndBranch(LTestOAndBranch *lir);
void visitTestVAndBranch(LTestVAndBranch *lir);
void visitFunctionDispatch(LFunctionDispatch *lir);
void visitTypeObjectDispatch(LTypeObjectDispatch *lir);
void visitObjectGroupDispatch(LObjectGroupDispatch *lir);
void visitBooleanToString(LBooleanToString *lir);
void emitIntToString(Register input, Register output, Label *ool);
void visitIntToString(LIntToString *lir);

Просмотреть файл

@ -283,9 +283,9 @@ JitRuntime::initialize(JSContext *cx)
if (!shapePreBarrier_)
return false;
JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for TypeObject");
typeObjectPreBarrier_ = generatePreBarrier(cx, MIRType_TypeObject);
if (!typeObjectPreBarrier_)
JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for ObjectGroup");
objectGroupPreBarrier_ = generatePreBarrier(cx, MIRType_ObjectGroup);
if (!objectGroupPreBarrier_)
return false;
JitSpew(JitSpew_Codegen, "# Emitting malloc stub");
@ -1755,7 +1755,7 @@ OffThreadCompilationAvailable(JSContext *cx)
static void
TrackAllProperties(JSContext *cx, JSObject *obj)
{
MOZ_ASSERT(obj->hasSingletonType());
MOZ_ASSERT(obj->isSingleton());
for (Shape::Range<NoGC> range(obj->lastProperty()); !range.empty(); range.popFront())
types::EnsureTrackPropertyTypes(cx, obj, range.front().propid());
@ -1773,14 +1773,14 @@ TrackPropertiesForSingletonScopes(JSContext *cx, JSScript *script, BaselineFrame
: nullptr;
while (environment && !environment->is<GlobalObject>()) {
if (environment->is<CallObject>() && environment->hasSingletonType())
if (environment->is<CallObject>() && environment->isSingleton())
TrackAllProperties(cx, environment);
environment = environment->enclosingScope();
}
if (baselineFrame) {
JSObject *scope = baselineFrame->scopeChain();
if (scope->is<CallObject>() && scope->hasSingletonType())
if (scope->is<CallObject>() && scope->isSingleton())
TrackAllProperties(cx, scope);
}
}
@ -1908,9 +1908,9 @@ IonCompile(JSContext *cx, JSScript *script,
// Some type was accessed which needs the new script properties
// analysis to be performed. Do this now and we will try to build
// again shortly.
const MIRGenerator::TypeObjectVector &types = builder->abortedNewScriptPropertiesTypes();
for (size_t i = 0; i < types.length(); i++) {
if (!types[i]->newScript()->maybeAnalyze(cx, types[i], nullptr, /* force = */ true))
const MIRGenerator::ObjectGroupVector &groups = builder->abortedNewScriptPropertiesGroups();
for (size_t i = 0; i < groups.length(); i++) {
if (!groups[i]->newScript()->maybeAnalyze(cx, groups[i], nullptr, /* force = */ true))
return AbortReason_Alloc;
}
}

Просмотреть файл

@ -2064,7 +2064,7 @@ IsResumableMIRType(MIRType type)
case MIRType_Elements:
case MIRType_Pointer:
case MIRType_Shape:
case MIRType_TypeObject:
case MIRType_ObjectGroup:
case MIRType_Float32x4:
case MIRType_Int32x4:
case MIRType_Doublex2:
@ -2880,7 +2880,7 @@ jit::ConvertLinearInequality(TempAllocator &alloc, MBasicBlock *block, const Lin
}
static bool
AnalyzePoppedThis(JSContext *cx, types::TypeObject *type,
AnalyzePoppedThis(JSContext *cx, types::ObjectGroup *group,
MDefinition *thisValue, MInstruction *ins, bool definitelyExecuted,
HandlePlainObject baseobj,
Vector<types::TypeNewScript::Initializer> *initializerList,
@ -2929,7 +2929,7 @@ AnalyzePoppedThis(JSContext *cx, types::TypeObject *type,
return true;
RootedId id(cx, NameToId(setprop->name()));
if (!types::AddClearDefiniteGetterSetterForPrototypeChain(cx, type, id)) {
if (!types::AddClearDefiniteGetterSetterForPrototypeChain(cx, group, id)) {
// The prototype chain already contains a getter/setter for this
// property, or type information is too imprecise.
return true;
@ -2989,7 +2989,7 @@ AnalyzePoppedThis(JSContext *cx, types::TypeObject *type,
if (!baseobj->lookup(cx, id) && !accessedProperties->append(get->name()))
return false;
if (!types::AddClearDefiniteGetterSetterForPrototypeChain(cx, type, id)) {
if (!types::AddClearDefiniteGetterSetterForPrototypeChain(cx, group, id)) {
// The |this| value can escape if any property reads it does go
// through a getter.
return true;
@ -3016,7 +3016,7 @@ CmpInstructions(const void *a, const void *b)
bool
jit::AnalyzeNewScriptDefiniteProperties(JSContext *cx, JSFunction *fun,
types::TypeObject *type, HandlePlainObject baseobj,
types::ObjectGroup *group, HandlePlainObject baseobj,
Vector<types::TypeNewScript::Initializer> *initializerList)
{
MOZ_ASSERT(cx->zone()->types.activeAnalysis);
@ -3053,7 +3053,7 @@ jit::AnalyzeNewScriptDefiniteProperties(JSContext *cx, JSFunction *fun,
return true;
}
types::TypeScript::SetThis(cx, script, types::Type::ObjectType(type));
types::TypeScript::SetThis(cx, script, types::Type::ObjectType(group));
MIRGraph graph(&temp);
InlineScriptTree *inlineScriptTree = InlineScriptTree::New(&temp, nullptr, nullptr, script);
@ -3158,7 +3158,7 @@ jit::AnalyzeNewScriptDefiniteProperties(JSContext *cx, JSFunction *fun,
bool handled = false;
size_t slotSpan = baseobj->slotSpan();
if (!AnalyzePoppedThis(cx, type, thisValue, ins, definitelyExecuted,
if (!AnalyzePoppedThis(cx, group, thisValue, ins, definitelyExecuted,
baseobj, initializerList, &accessedProperties, &handled))
{
return false;
@ -3186,7 +3186,7 @@ jit::AnalyzeNewScriptDefiniteProperties(JSContext *cx, JSFunction *fun,
if (MResumePoint *rp = block->callerResumePoint()) {
if (block->numPredecessors() == 1 && block->getPredecessor(0) == rp->block()) {
JSScript *script = rp->block()->info().script();
if (!types::AddClearDefiniteFunctionUsesInScript(cx, type, script, block->info().script()))
if (!types::AddClearDefiniteFunctionUsesInScript(cx, group, script, block->info().script()))
return false;
}
}

Просмотреть файл

@ -170,7 +170,7 @@ ConvertLinearInequality(TempAllocator &alloc, MBasicBlock *block, const LinearSu
bool
AnalyzeNewScriptDefiniteProperties(JSContext *cx, JSFunction *fun,
types::TypeObject *type, HandlePlainObject baseobj,
types::ObjectGroup *group, HandlePlainObject baseobj,
Vector<types::TypeNewScript::Initializer> *initializerList);
bool

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -841,9 +841,9 @@ class IonBuilder
// Inlining helpers.
bool inlineGenericFallback(JSFunction *target, CallInfo &callInfo, MBasicBlock *dispatchBlock,
bool clonedAtCallsite);
bool inlineTypeObjectFallback(CallInfo &callInfo, MBasicBlock *dispatchBlock,
MTypeObjectDispatch *dispatch, MGetPropertyCache *cache,
MBasicBlock **fallbackTarget);
bool inlineObjectGroupFallback(CallInfo &callInfo, MBasicBlock *dispatchBlock,
MObjectGroupDispatch *dispatch, MGetPropertyCache *cache,
MBasicBlock **fallbackTarget);
bool atomicsMeetsPreconditions(CallInfo &callInfo, Scalar::Type *arrayElementType);
void atomicsCheckBounds(CallInfo &callInfo, MInstruction **elements, MDefinition **index);
@ -892,7 +892,7 @@ class IonBuilder
bool freezePropTypeSets(types::TemporaryTypeSet *types,
JSObject *foundProto, PropertyName *name);
bool canInlinePropertyOpShapes(const BaselineInspector::ShapeVector &nativeShapes,
const BaselineInspector::TypeObjectVector &unboxedTypes);
const BaselineInspector::ObjectGroupVector &unboxedGroups);
types::TemporaryTypeSet *bytecodeTypes(jsbytecode *pc);
@ -1076,7 +1076,7 @@ class IonBuilder
// In such cases we do not have read the property, except when the type
// object is unknown.
//
// As an optimization, we can dispatch a call based on the type object,
// As an optimization, we can dispatch a call based on the object group,
// without doing the MGetPropertyCache. This is what is achieved by
// |IonBuilder::inlineCalls|. As we might not know all the functions, we
// are adding a fallback path, where this MGetPropertyCache would be moved

Просмотреть файл

@ -487,8 +487,8 @@ GeneratePrototypeGuards(JSContext *cx, IonScript *ion, MacroAssembler &masm, JSO
if (obj->hasUncacheableProto()) {
// Note: objectReg and scratchReg may be the same register, so we cannot
// use objectReg in the rest of this function.
masm.loadPtr(Address(objectReg, JSObject::offsetOfType()), scratchReg);
Address proto(scratchReg, types::TypeObject::offsetOfProto());
masm.loadPtr(Address(objectReg, JSObject::offsetOfGroup()), scratchReg);
Address proto(scratchReg, types::ObjectGroup::offsetOfProto());
masm.branchPtr(Assembler::NotEqual, proto,
ImmMaybeNurseryPtr(obj->getProto()), failures);
}
@ -500,10 +500,10 @@ GeneratePrototypeGuards(JSContext *cx, IonScript *ion, MacroAssembler &masm, JSO
return;
while (pobj != holder) {
if (pobj->hasUncacheableProto()) {
MOZ_ASSERT(!pobj->hasSingletonType());
MOZ_ASSERT(!pobj->isSingleton());
masm.movePtr(ImmMaybeNurseryPtr(pobj), scratchReg);
Address objType(scratchReg, JSObject::offsetOfType());
masm.branchPtr(Assembler::NotEqual, objType, ImmGCPtr(pobj->type()), failures);
Address groupAddr(scratchReg, JSObject::offsetOfGroup());
masm.branchPtr(Assembler::NotEqual, groupAddr, ImmGCPtr(pobj->group()), failures);
}
pobj = pobj->getProto();
}
@ -788,8 +788,8 @@ GenerateReadSlot(JSContext *cx, IonScript *ion, MacroAssembler &masm,
failures);
} else {
attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
Address(object, JSObject::offsetOfType()),
ImmGCPtr(obj->type()),
Address(object, JSObject::offsetOfGroup()),
ImmGCPtr(obj->group()),
failures);
}
@ -890,8 +890,8 @@ GenerateReadUnboxed(JSContext *cx, IonScript *ion, MacroAssembler &masm,
{
// Guard on the type of the object.
attacher.branchNextStub(masm, Assembler::NotEqual,
Address(object, JSObject::offsetOfType()),
ImmGCPtr(obj->type()));
Address(object, JSObject::offsetOfGroup()),
ImmGCPtr(obj->group()));
Address address(object, UnboxedPlainObject::offsetOfData() + property->offset);
@ -1897,10 +1897,10 @@ CheckTypeSetForWrite(MacroAssembler &masm, JSObject *obj, jsid id,
Register object, ConstantOrRegister value, Label *failure)
{
TypedOrValueRegister valReg = value.reg();
types::TypeObject *type = obj->type();
if (type->unknownProperties())
types::ObjectGroup *group = obj->group();
if (group->unknownProperties())
return;
types::HeapTypeSet *propTypes = type->maybeGetProperty(id);
types::HeapTypeSet *propTypes = group->maybeGetProperty(id);
MOZ_ASSERT(propTypes);
// guardTypeSet can read from type sets without triggering read barriers.
@ -1928,11 +1928,11 @@ GenerateSetSlot(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &att
// We can't do anything that would change the HeapTypeSet, so
// just guard that it's already there.
// Obtain and guard on the TypeObject of the object.
types::TypeObject *type = obj->type();
// Obtain and guard on the ObjectGroup of the object.
types::ObjectGroup *group = obj->group();
masm.branchPtr(Assembler::NotEqual,
Address(object, JSObject::offsetOfType()),
ImmGCPtr(type), &failures);
Address(object, JSObject::offsetOfGroup()),
ImmGCPtr(group), &failures);
if (checkTypeset) {
masm.push(object);
@ -2463,7 +2463,7 @@ SetPropertyIC::attachCallSetter(JSContext *cx, HandleScript outerScript, IonScri
static void
GenerateAddSlot(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
NativeObject *obj, Shape *oldShape, types::TypeObject *oldType,
NativeObject *obj, Shape *oldShape, types::ObjectGroup *oldGroup,
Register object, ConstantOrRegister value,
bool checkTypeset)
{
@ -2472,8 +2472,8 @@ GenerateAddSlot(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &att
Label failures;
// Guard the type of the object
masm.branchPtr(Assembler::NotEqual, Address(object, JSObject::offsetOfType()),
ImmGCPtr(oldType), &failures);
masm.branchPtr(Assembler::NotEqual, Address(object, JSObject::offsetOfGroup()),
ImmGCPtr(oldGroup), &failures);
// Guard shapes along prototype chain.
masm.branchTestObjShape(Assembler::NotEqual, object, oldShape, &failures);
@ -2511,24 +2511,24 @@ GenerateAddSlot(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &att
masm.callPreBarrier(shapeAddr, MIRType_Shape);
masm.storePtr(ImmGCPtr(newShape), shapeAddr);
if (oldType != obj->type()) {
// Changing object's type from a partially to fully initialized type,
// per the acquired properties analysis. Only change the type if the
// old type still has a newScript.
if (oldGroup != obj->group()) {
// Changing object's group from a partially to fully initialized group,
// per the acquired properties analysis. Only change the group if the
// old group still has a newScript.
Label noTypeChange, skipPop;
masm.push(object);
masm.loadPtr(Address(object, JSObject::offsetOfType()), object);
masm.loadPtr(Address(object, JSObject::offsetOfGroup()), object);
masm.branchPtr(Assembler::Equal,
Address(object, types::TypeObject::offsetOfAddendum()),
Address(object, types::ObjectGroup::offsetOfAddendum()),
ImmWord(0),
&noTypeChange);
masm.pop(object);
Address typeAddr(object, JSObject::offsetOfType());
Address groupAddr(object, JSObject::offsetOfGroup());
if (cx->zone()->needsIncrementalBarrier())
masm.callPreBarrier(typeAddr, MIRType_TypeObject);
masm.storePtr(ImmGCPtr(obj->type()), typeAddr);
masm.callPreBarrier(groupAddr, MIRType_ObjectGroup);
masm.storePtr(ImmGCPtr(obj->group()), groupAddr);
masm.jump(&skipPop);
masm.bind(&noTypeChange);
@ -2564,14 +2564,14 @@ GenerateAddSlot(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &att
bool
SetPropertyIC::attachAddSlot(JSContext *cx, HandleScript outerScript, IonScript *ion,
HandleNativeObject obj, HandleShape oldShape, HandleTypeObject oldType,
HandleNativeObject obj, HandleShape oldShape, HandleObjectGroup oldGroup,
bool checkTypeset)
{
MOZ_ASSERT_IF(!needsTypeBarrier(), !checkTypeset);
MacroAssembler masm(cx, ion, outerScript, profilerLeavePc_);
RepatchStubAppender attacher(*this);
GenerateAddSlot(cx, masm, attacher, obj, oldShape, oldType, object(), value(), checkTypeset);
GenerateAddSlot(cx, masm, attacher, obj, oldShape, oldGroup, object(), value(), checkTypeset);
return linkAndAttachStub(cx, masm, attacher, ion, "adding");
}
@ -2579,13 +2579,13 @@ static bool
CanInlineSetPropTypeCheck(JSObject *obj, jsid id, ConstantOrRegister val, bool *checkTypeset)
{
bool shouldCheck = false;
types::TypeObject *type = obj->type();
if (!type->unknownProperties()) {
types::HeapTypeSet *propTypes = type->maybeGetProperty(id);
types::ObjectGroup *group = obj->group();
if (!group->unknownProperties()) {
types::HeapTypeSet *propTypes = group->maybeGetProperty(id);
if (!propTypes)
return false;
if (!propTypes->unknown()) {
if (obj->hasSingletonType() && !propTypes->nonConstantProperty())
if (obj->isSingleton() && !propTypes->nonConstantProperty())
return false;
shouldCheck = true;
if (val.constant()) {
@ -2695,7 +2695,7 @@ IsPropertyAddInlineable(NativeObject *obj, HandleId id, ConstantOrRegister val,
// Don't attach if we are adding a property to an object which the new
// script properties analysis hasn't been performed for yet, as there
// may be a shape change required here afterwards.
if (obj->type()->newScript() && !obj->type()->newScript()->analyzed())
if (obj->group()->newScript() && !obj->group()->newScript()->analyzed())
return false;
if (needsTypeBarrier)
@ -2747,8 +2747,8 @@ GenerateSetUnboxed(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &
// Guard on the type of the object.
masm.branchPtr(Assembler::NotEqual,
Address(object, JSObject::offsetOfType()),
ImmGCPtr(obj->type()), &failure);
Address(object, JSObject::offsetOfGroup()),
ImmGCPtr(obj->group()), &failure);
if (checkTypeset) {
masm.push(object);
@ -2771,7 +2771,7 @@ GenerateSetUnboxed(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &
// never been converted to native objects and the type set check performed
// above ensures the value being written can be stored in the unboxed
// object.
Label *storeFailure = obj->type()->unknownProperties() ? &failure : nullptr;
Label *storeFailure = obj->group()->unknownProperties() ? &failure : nullptr;
masm.storeUnboxedProperty(address, unboxedType, value, storeFailure);
@ -2828,8 +2828,8 @@ SetPropertyIC::update(JSContext *cx, size_t cacheIndex, HandleObject obj,
RootedPropertyName name(cx, cache.name());
RootedId id(cx, AtomToId(name));
RootedTypeObject oldType(cx, obj->getType(cx));
if (!oldType)
RootedObjectGroup oldGroup(cx, obj->getGroup(cx));
if (!oldGroup)
return false;
// Stop generating new stubs once we hit the stub count limit, see
@ -2913,7 +2913,7 @@ SetPropertyIC::update(JSContext *cx, size_t cacheIndex, HandleObject obj,
&checkTypeset))
{
RootedNativeObject nobj(cx, &obj->as<NativeObject>());
if (!cache.attachAddSlot(cx, script, ion, nobj, oldShape, oldType, checkTypeset))
if (!cache.attachAddSlot(cx, script, ion, nobj, oldShape, oldGroup, checkTypeset))
return false;
addedSetterStub = true;
}

Просмотреть файл

@ -748,7 +748,7 @@ class SetPropertyIC : public RepatchIonCache
void *returnAddr);
bool attachAddSlot(JSContext *cx, HandleScript outerScript, IonScript *ion,
HandleNativeObject obj, HandleShape oldShape, HandleTypeObject oldType,
HandleNativeObject obj, HandleShape oldShape, HandleObjectGroup oldGroup,
bool checkTypeset);
bool attachSetUnboxed(JSContext *cx, HandleScript outerScript, IonScript *ion,

Просмотреть файл

@ -379,8 +379,8 @@ enum MIRType
MIRType_Elements, // An elements vector
MIRType_Pointer, // An opaque pointer that receives no special treatment
MIRType_Shape, // A Shape pointer.
MIRType_TypeObject, // A TypeObject pointer.
MIRType_Last = MIRType_TypeObject,
MIRType_ObjectGroup, // An ObjectGroup pointer.
MIRType_Last = MIRType_ObjectGroup,
MIRType_Float32x4 = MIRType_Float32 | (2 << VECTOR_SCALE_SHIFT),
MIRType_Int32x4 = MIRType_Int32 | (2 << VECTOR_SCALE_SHIFT),
MIRType_Doublex2 = MIRType_Double | (1 << VECTOR_SCALE_SHIFT)

Просмотреть файл

@ -186,7 +186,7 @@ class JitRuntime
JitCode *stringPreBarrier_;
JitCode *objectPreBarrier_;
JitCode *shapePreBarrier_;
JitCode *typeObjectPreBarrier_;
JitCode *objectGroupPreBarrier_;
// Thunk to call malloc/free.
JitCode *mallocStub_;
@ -361,7 +361,7 @@ class JitRuntime
case MIRType_String: return stringPreBarrier_;
case MIRType_Object: return objectPreBarrier_;
case MIRType_Shape: return shapePreBarrier_;
case MIRType_TypeObject: return typeObjectPreBarrier_;
case MIRType_ObjectGroup: return objectGroupPreBarrier_;
default: MOZ_CRASH();
}
}

Просмотреть файл

@ -2098,15 +2098,15 @@ class LFunctionDispatch : public LInstructionHelper<0, 1, 0>
}
};
class LTypeObjectDispatch : public LInstructionHelper<0, 1, 1>
class LObjectGroupDispatch : public LInstructionHelper<0, 1, 1>
{
// Dispatch is performed based on a TypeObject -> block
// Dispatch is performed based on an ObjectGroup -> block
// map inferred by the MIR.
public:
LIR_HEADER(TypeObjectDispatch);
LIR_HEADER(ObjectGroupDispatch);
LTypeObjectDispatch(const LAllocation &in, const LDefinition &temp) {
LObjectGroupDispatch(const LAllocation &in, const LDefinition &temp) {
setOperand(0, in);
setTemp(0, temp);
}
@ -2115,8 +2115,8 @@ class LTypeObjectDispatch : public LInstructionHelper<0, 1, 1>
return getTemp(0);
}
MTypeObjectDispatch *mir() {
return mir_->toTypeObjectDispatch();
MObjectGroupDispatch *mir() {
return mir_->toObjectGroupDispatch();
}
};

Просмотреть файл

@ -103,7 +103,7 @@
_(TestVAndBranch) \
_(TestOAndBranch) \
_(FunctionDispatch) \
_(TypeObjectDispatch) \
_(ObjectGroupDispatch) \
_(Compare) \
_(CompareAndBranch) \
_(CompareD) \
@ -201,7 +201,7 @@
_(StoreSlotT) \
_(GuardShape) \
_(GuardShapePolymorphic) \
_(GuardObjectType) \
_(GuardObjectGroup) \
_(GuardObjectIdentity) \
_(GuardClass) \
_(TypeBarrierV) \

Просмотреть файл

@ -201,7 +201,7 @@ void
LIRGenerator::visitNewCallObject(MNewCallObject *ins)
{
LInstruction *lir;
if (ins->templateObject()->hasSingletonType()) {
if (ins->templateObject()->isSingleton()) {
LNewSingletonCallObject *singletonLir = new(alloc()) LNewSingletonCallObject(temp());
define(singletonLir, ins);
lir = singletonLir;
@ -844,9 +844,9 @@ LIRGenerator::visitFunctionDispatch(MFunctionDispatch *ins)
}
void
LIRGenerator::visitTypeObjectDispatch(MTypeObjectDispatch *ins)
LIRGenerator::visitObjectGroupDispatch(MObjectGroupDispatch *ins)
{
LTypeObjectDispatch *lir = new(alloc()) LTypeObjectDispatch(useRegister(ins->input()), temp());
LObjectGroupDispatch *lir = new(alloc()) LObjectGroupDispatch(useRegister(ins->input()), temp());
add(lir, ins);
}
@ -2124,11 +2124,11 @@ LIRGenerator::visitStringReplace(MStringReplace *ins)
void
LIRGenerator::visitLambda(MLambda *ins)
{
if (ins->info().singletonType || ins->info().useNewTypeForClone) {
if (ins->info().singletonType || ins->info().useSingletonForClone) {
// If the function has a singleton type, this instruction will only be
// executed once so we don't bother inlining it.
//
// If UseNewTypeForClone is true, we will assign a singleton type to
// If UseSingletonForClone is true, we will assign a singleton type to
// the clone and we have to clone the script, we can't do that inline.
LLambdaForSingleton *lir = new(alloc()) LLambdaForSingleton(useRegisterAtStart(ins->scopeChain()));
defineReturn(lir, ins);
@ -2312,7 +2312,7 @@ LIRGenerator::visitTypeBarrier(MTypeBarrier *ins)
return;
}
// Handle typebarrier with specific TypeObject/SingleObjects.
// Handle typebarrier with specific ObjectGroup/SingleObjects.
if (inputType == MIRType_Object && !types->hasType(types::Type::AnyObjectType()) &&
ins->barrierKind() != BarrierKind::TypeTagOnly)
{

Просмотреть файл

@ -109,7 +109,7 @@ class LIRGenerator : public LIRGeneratorSpecific
void visitTest(MTest *test);
void visitGotoWithFake(MGotoWithFake *ins);
void visitFunctionDispatch(MFunctionDispatch *ins);
void visitTypeObjectDispatch(MTypeObjectDispatch *ins);
void visitObjectGroupDispatch(MObjectGroupDispatch *ins);
void visitCompare(MCompare *comp);
void visitTypeOf(MTypeOf *ins);
void visitToId(MToId *ins);

Просмотреть файл

@ -371,9 +371,9 @@ IonBuilder::inlineArray(CallInfo &callInfo)
initLength = callInfo.argc();
allocating = NewArray_FullyAllocating;
types::TypeObjectKey *type = types::TypeObjectKey::get(templateArray);
if (!type->unknownProperties()) {
types::HeapTypeSetKey elemTypes = type->property(JSID_VOID);
types::ObjectGroupKey *key = types::ObjectGroupKey::get(templateArray);
if (!key->unknownProperties()) {
types::HeapTypeSetKey elemTypes = key->property(JSID_VOID);
for (uint32_t i = 0; i < initLength; i++) {
MDefinition *value = callInfo.getArg(i);
@ -402,7 +402,7 @@ IonBuilder::inlineArray(CallInfo &callInfo)
callInfo.setImplicitlyUsedUnchecked();
MNewArrayDynamicLength *ins =
MNewArrayDynamicLength::New(alloc(), constraints(), templateArray,
templateArray->type()->initialHeap(constraints()),
templateArray->group()->initialHeap(constraints()),
arg);
current->add(ins);
current->push(ins);
@ -436,7 +436,7 @@ IonBuilder::inlineArray(CallInfo &callInfo)
current->add(templateConst);
MNewArray *ins = MNewArray::New(alloc(), constraints(), initLength, templateConst,
templateArray->type()->initialHeap(constraints()),
templateArray->group()->initialHeap(constraints()),
allocating);
current->add(ins);
current->push(ins);
@ -500,7 +500,7 @@ IonBuilder::inlineArrayPopShift(CallInfo &callInfo, MArrayPopShift::Mode mode)
// Pop and shift are only handled for dense arrays that have never been
// used in an iterator: popping elements does not account for suppressing
// deleted properties in active iterators.
types::TypeObjectFlags unhandledFlags =
types::ObjectGroupFlags unhandledFlags =
types::OBJECT_FLAG_SPARSE_INDEXES |
types::OBJECT_FLAG_LENGTH_OVERFLOW |
types::OBJECT_FLAG_ITERATED;
@ -732,11 +732,11 @@ IonBuilder::inlineArrayConcat(CallInfo &callInfo)
if (thisTypes->getObjectCount() != 1)
return InliningStatus_NotInlined;
types::TypeObject *baseThisType = thisTypes->getTypeObject(0);
if (!baseThisType)
types::ObjectGroup *thisGroup = thisTypes->getGroup(0);
if (!thisGroup)
return InliningStatus_NotInlined;
types::TypeObjectKey *thisType = types::TypeObjectKey::get(baseThisType);
if (thisType->unknownProperties())
types::ObjectGroupKey *thisKey = types::ObjectGroupKey::get(thisGroup);
if (thisKey->unknownProperties())
return InliningStatus_NotInlined;
// Don't inline if 'this' is packed and the argument may not be packed
@ -751,28 +751,28 @@ IonBuilder::inlineArrayConcat(CallInfo &callInfo)
// Constraints modeling this concat have not been generated by inference,
// so check that type information already reflects possible side effects of
// this call.
types::HeapTypeSetKey thisElemTypes = thisType->property(JSID_VOID);
types::HeapTypeSetKey thisElemTypes = thisKey->property(JSID_VOID);
types::TemporaryTypeSet *resTypes = getInlineReturnTypeSet();
if (!resTypes->hasType(types::Type::ObjectType(thisType)))
if (!resTypes->hasType(types::Type::ObjectType(thisKey)))
return InliningStatus_NotInlined;
for (unsigned i = 0; i < argTypes->getObjectCount(); i++) {
types::TypeObjectKey *argType = argTypes->getObject(i);
if (!argType)
types::ObjectGroupKey *key = argTypes->getObject(i);
if (!key)
continue;
if (argType->unknownProperties())
if (key->unknownProperties())
return InliningStatus_NotInlined;
types::HeapTypeSetKey elemTypes = argType->property(JSID_VOID);
types::HeapTypeSetKey elemTypes = key->property(JSID_VOID);
if (!elemTypes.knownSubset(constraints(), thisElemTypes))
return InliningStatus_NotInlined;
}
// Inline the call.
JSObject *templateObj = inspector->getTemplateObjectForNative(pc, js::array_concat);
if (!templateObj || templateObj->type() != baseThisType)
if (!templateObj || templateObj->group() != thisGroup)
return InliningStatus_NotInlined;
MOZ_ASSERT(templateObj->is<ArrayObject>());
@ -780,7 +780,7 @@ IonBuilder::inlineArrayConcat(CallInfo &callInfo)
MArrayConcat *ins = MArrayConcat::New(alloc(), constraints(), callInfo.thisArg(), callInfo.getArg(0),
&templateObj->as<ArrayObject>(),
templateObj->type()->initialHeap(constraints()));
templateObj->group()->initialHeap(constraints()));
current->add(ins);
current->push(ins);
@ -1371,11 +1371,11 @@ IonBuilder::inlineStringSplit(CallInfo &callInfo)
return InliningStatus_NotInlined;
MOZ_ASSERT(templateObject->is<ArrayObject>());
types::TypeObjectKey *retType = types::TypeObjectKey::get(templateObject);
if (retType->unknownProperties())
types::ObjectGroupKey *retKey = types::ObjectGroupKey::get(templateObject);
if (retKey->unknownProperties())
return InliningStatus_NotInlined;
types::HeapTypeSetKey key = retType->property(JSID_VOID);
types::HeapTypeSetKey key = retKey->property(JSID_VOID);
if (!key.maybeTypes())
return InliningStatus_NotInlined;
@ -1690,7 +1690,7 @@ IonBuilder::inlineObjectCreate(CallInfo &callInfo)
return InliningStatus_NotInlined;
MOZ_ASSERT(templateObject->is<PlainObject>());
MOZ_ASSERT(!templateObject->hasSingletonType());
MOZ_ASSERT(!templateObject->isSingleton());
// Ensure the argument matches the template object's prototype.
MDefinition *arg = callInfo.getArg(0);
@ -1699,7 +1699,7 @@ IonBuilder::inlineObjectCreate(CallInfo &callInfo)
return InliningStatus_NotInlined;
types::TemporaryTypeSet *types = arg->resultTypeSet();
if (!types || types->getSingleton() != proto)
if (!types || types->maybeSingleton() != proto)
return InliningStatus_NotInlined;
MOZ_ASSERT(types->getKnownMIRType() == MIRType_Object);
@ -1713,7 +1713,7 @@ IonBuilder::inlineObjectCreate(CallInfo &callInfo)
MConstant *templateConst = MConstant::NewConstraintlessObject(alloc(), templateObject);
current->add(templateConst);
MNewObject *ins = MNewObject::New(alloc(), constraints(), templateConst,
templateObject->type()->initialHeap(constraints()),
templateObject->group()->initialHeap(constraints()),
MNewObject::ObjectCreate);
current->add(ins);
current->push(ins);
@ -2664,7 +2664,7 @@ IonBuilder::inlineConstructTypedObject(CallInfo &callInfo, TypeDescr *descr)
callInfo.setImplicitlyUsedUnchecked();
MNewTypedObject *ins = MNewTypedObject::New(alloc(), constraints(), templateObject,
templateObject->type()->initialHeap(constraints()));
templateObject->group()->initialHeap(constraints()));
current->add(ins);
current->push(ins);
@ -2713,7 +2713,7 @@ IonBuilder::inlineConstructSimdObject(CallInfo &callInfo, SimdTypeDescr *descr)
current->add(values);
MSimdBox *obj = MSimdBox::New(alloc(), constraints(), values, inlineTypedObject,
inlineTypedObject->type()->initialHeap(constraints()));
inlineTypedObject->group()->initialHeap(constraints()));
current->add(obj);
current->push(obj);
@ -2743,7 +2743,7 @@ IonBuilder::inlineSimdInt32x4BinaryArith(CallInfo &callInfo, JSNative native,
op, MIRType_Int32x4);
MSimdBox *obj = MSimdBox::New(alloc(), constraints(), ins, inlineTypedObject,
inlineTypedObject->type()->initialHeap(constraints()));
inlineTypedObject->group()->initialHeap(constraints()));
current->add(ins);
current->add(obj);

Просмотреть файл

@ -650,13 +650,13 @@ MConstant::NewConstraintlessObject(TempAllocator &alloc, JSObject *v)
types::TemporaryTypeSet *
jit::MakeSingletonTypeSet(types::CompilerConstraintList *constraints, JSObject *obj)
{
// Invalidate when this object's TypeObject gets unknown properties. This
// Invalidate when this object's ObjectGroup gets unknown properties. This
// happens for instance when we mutate an object's __proto__, in this case
// we want to invalidate and mark this TypeSet as containing AnyObject
// (because mutating __proto__ will change an object's TypeObject).
// (because mutating __proto__ will change an object's ObjectGroup).
MOZ_ASSERT(constraints);
types::TypeObjectKey *objType = types::TypeObjectKey::get(obj);
objType->hasStableClassAndProto(constraints);
types::ObjectGroupKey *key = types::ObjectGroupKey::get(obj);
key->hasStableClassAndProto(constraints);
LifoAlloc *alloc = GetJitContext()->temp->lifoAlloc();
return alloc->new_<types::TemporaryTypeSet>(alloc, types::Type::ObjectType(obj));
@ -812,7 +812,7 @@ MNurseryObject::MNurseryObject(JSObject *obj, uint32_t index, types::CompilerCon
setResultType(MIRType_Object);
MOZ_ASSERT(IsInsideNursery(obj));
MOZ_ASSERT(!obj->hasSingletonType());
MOZ_ASSERT(!obj->isSingleton());
setResultTypeSet(MakeSingletonTypeSet(constraints, obj));
setMovable();
@ -3641,7 +3641,7 @@ bool
MNewObject::shouldUseVM() const
{
PlainObject *obj = templateObject();
return obj->hasSingletonType() || obj->hasDynamicSlots();
return obj->isSingleton() || obj->hasDynamicSlots();
}
bool
@ -3759,7 +3759,7 @@ MNewArray::shouldUseVM() const
// immediately, but only when data doesn't fit the available array slots.
bool allocating = allocatingBehaviour() != NewArray_Unallocating && count() > arraySlots;
return templateObject()->hasSingletonType() || allocating;
return templateObject()->isSingleton() || allocating;
}
bool
@ -4039,7 +4039,7 @@ InlinePropertyTable::buildTypeSetForFunction(JSFunction *func) const
return nullptr;
for (size_t i = 0; i < numEntries(); i++) {
if (entries_[i]->func == func)
types->addType(types::Type::ObjectType(entries_[i]->typeObj), alloc);
types->addType(types::Type::ObjectType(entries_[i]->group), alloc);
}
return types;
}
@ -4351,14 +4351,14 @@ jit::DenseNativeElementType(types::CompilerConstraintList *constraints, MDefinit
unsigned count = types->getObjectCount();
for (unsigned i = 0; i < count; i++) {
types::TypeObjectKey *object = types->getObject(i);
if (!object)
types::ObjectGroupKey *key = types->getObject(i);
if (!key)
continue;
if (object->unknownProperties())
if (key->unknownProperties())
return MIRType_None;
types::HeapTypeSetKey elementTypes = object->property(JSID_VOID);
types::HeapTypeSetKey elementTypes = key->property(JSID_VOID);
MIRType type = elementTypes.knownMIRType(constraints);
if (type == MIRType_None)
@ -4375,7 +4375,7 @@ jit::DenseNativeElementType(types::CompilerConstraintList *constraints, MDefinit
static BarrierKind
PropertyReadNeedsTypeBarrier(types::CompilerConstraintList *constraints,
types::TypeObjectKey *object, PropertyName *name,
types::ObjectGroupKey *key, PropertyName *name,
types::TypeSet *observed)
{
// If the object being read from has types for the property which haven't
@ -4386,14 +4386,14 @@ PropertyReadNeedsTypeBarrier(types::CompilerConstraintList *constraints,
//
// We also need a barrier if the object is a proxy, because then all bets
// are off, just as if it has unknown properties.
if (object->unknownProperties() || observed->empty() ||
object->clasp()->isProxy())
if (key->unknownProperties() || observed->empty() ||
key->clasp()->isProxy())
{
return BarrierKind::TypeSet;
}
jsid id = name ? NameToId(name) : JSID_VOID;
types::HeapTypeSetKey property = object->property(id);
types::HeapTypeSetKey property = key->property(id);
if (property.maybeTypes()) {
if (!TypeSetIncludes(observed, MIRType_Value, property.maybeTypes())) {
// If all possible objects have been observed, we don't have to
@ -4410,7 +4410,8 @@ PropertyReadNeedsTypeBarrier(types::CompilerConstraintList *constraints,
// initial 'undefined' value for properties, in particular global
// variables declared with 'var'. Until the property is assigned a value
// other than undefined, a barrier is required.
if (JSObject *obj = object->singleton()) {
if (key->isSingleton()) {
JSObject *obj = key->singleton();
if (name && types::CanHaveEmptyPropertyTypesForOwnProperty(obj) &&
(!property.maybeTypes() || property.maybeTypes()->empty()))
{
@ -4425,17 +4426,17 @@ PropertyReadNeedsTypeBarrier(types::CompilerConstraintList *constraints,
BarrierKind
jit::PropertyReadNeedsTypeBarrier(JSContext *propertycx,
types::CompilerConstraintList *constraints,
types::TypeObjectKey *object, PropertyName *name,
types::ObjectGroupKey *key, PropertyName *name,
types::TemporaryTypeSet *observed, bool updateObserved)
{
// If this access has never executed, try to add types to the observed set
// according to any property which exists on the object or its prototype.
if (updateObserved && observed->empty() && name) {
JSObject *obj;
if (object->singleton())
obj = object->singleton();
else if (object->hasTenuredProto())
obj = object->proto().toObjectOrNull();
if (key->isSingleton())
obj = key->singleton();
else if (key->hasTenuredProto())
obj = key->proto().toObjectOrNull();
else
obj = nullptr;
@ -4443,12 +4444,12 @@ jit::PropertyReadNeedsTypeBarrier(JSContext *propertycx,
if (!obj->getClass()->isNative())
break;
types::TypeObjectKey *typeObj = types::TypeObjectKey::get(obj);
types::ObjectGroupKey *key = types::ObjectGroupKey::get(obj);
if (propertycx)
typeObj->ensureTrackedProperty(propertycx, NameToId(name));
key->ensureTrackedProperty(propertycx, NameToId(name));
if (!typeObj->unknownProperties()) {
types::HeapTypeSetKey property = typeObj->property(NameToId(name));
if (!key->unknownProperties()) {
types::HeapTypeSetKey property = key->property(NameToId(name));
if (property.maybeTypes()) {
types::TypeSet::TypeList types;
if (!property.maybeTypes()->enumerateTypes(&types))
@ -4467,7 +4468,7 @@ jit::PropertyReadNeedsTypeBarrier(JSContext *propertycx,
}
}
return PropertyReadNeedsTypeBarrier(constraints, object, name, observed);
return PropertyReadNeedsTypeBarrier(constraints, key, name, observed);
}
BarrierKind
@ -4487,9 +4488,9 @@ jit::PropertyReadNeedsTypeBarrier(JSContext *propertycx,
bool updateObserved = types->getObjectCount() == 1;
for (size_t i = 0; i < types->getObjectCount(); i++) {
types::TypeObjectKey *object = types->getObject(i);
if (object) {
BarrierKind kind = PropertyReadNeedsTypeBarrier(propertycx, constraints, object, name,
types::ObjectGroupKey *key = types->getObject(i);
if (key) {
BarrierKind kind = PropertyReadNeedsTypeBarrier(propertycx, constraints, key, name,
observed, updateObserved);
if (kind == BarrierKind::TypeSet)
return BarrierKind::TypeSet;
@ -4521,16 +4522,16 @@ jit::PropertyReadOnPrototypeNeedsTypeBarrier(types::CompilerConstraintList *cons
BarrierKind res = BarrierKind::NoBarrier;
for (size_t i = 0; i < types->getObjectCount(); i++) {
types::TypeObjectKey *object = types->getObject(i);
if (!object)
types::ObjectGroupKey *key = types->getObject(i);
if (!key)
continue;
while (true) {
if (!object->hasStableClassAndProto(constraints) || !object->hasTenuredProto())
if (!key->hasStableClassAndProto(constraints) || !key->hasTenuredProto())
return BarrierKind::TypeSet;
if (!object->proto().isObject())
if (!key->proto().isObject())
break;
object = types::TypeObjectKey::get(object->proto().toObject());
BarrierKind kind = PropertyReadNeedsTypeBarrier(constraints, object, name, observed);
key = types::ObjectGroupKey::get(key->proto().toObject());
BarrierKind kind = PropertyReadNeedsTypeBarrier(constraints, key, name, observed);
if (kind == BarrierKind::TypeSet)
return BarrierKind::TypeSet;
@ -4557,13 +4558,13 @@ jit::PropertyReadIsIdempotent(types::CompilerConstraintList *constraints,
return false;
for (size_t i = 0; i < types->getObjectCount(); i++) {
types::TypeObjectKey *object = types->getObject(i);
if (object) {
if (object->unknownProperties())
types::ObjectGroupKey *key = types->getObject(i);
if (key) {
if (key->unknownProperties())
return false;
// Check if the property has been reconfigured or is a getter.
types::HeapTypeSetKey property = object->property(NameToId(name));
types::HeapTypeSetKey property = key->property(NameToId(name));
if (property.nonData(constraints))
return false;
}
@ -4588,17 +4589,17 @@ jit::AddObjectsForPropertyRead(MDefinition *obj, PropertyName *name,
}
for (size_t i = 0; i < types->getObjectCount(); i++) {
types::TypeObjectKey *object = types->getObject(i);
if (!object)
types::ObjectGroupKey *key = types->getObject(i);
if (!key)
continue;
if (object->unknownProperties()) {
if (key->unknownProperties()) {
observed->addType(types::Type::AnyObjectType(), alloc);
return;
}
jsid id = name ? NameToId(name) : JSID_VOID;
types::HeapTypeSetKey property = object->property(id);
types::HeapTypeSetKey property = key->property(id);
types::HeapTypeSet *types = property.maybeTypes();
if (!types)
continue;
@ -4609,9 +4610,9 @@ jit::AddObjectsForPropertyRead(MDefinition *obj, PropertyName *name,
}
for (size_t i = 0; i < types->getObjectCount(); i++) {
types::TypeObjectKey *object = types->getObject(i);
if (object)
observed->addType(types::Type::ObjectType(object), alloc);
types::ObjectGroupKey *key = types->getObject(i);
if (key)
observed->addType(types::Type::ObjectType(key), alloc);
}
}
}
@ -4651,15 +4652,15 @@ TryAddTypeBarrierForWrite(TempAllocator &alloc, types::CompilerConstraintList *c
Maybe<types::HeapTypeSetKey> aggregateProperty;
for (size_t i = 0; i < objTypes->getObjectCount(); i++) {
types::TypeObjectKey *object = objTypes->getObject(i);
if (!object)
types::ObjectGroupKey *key = objTypes->getObject(i);
if (!key)
continue;
if (object->unknownProperties())
if (key->unknownProperties())
return false;
jsid id = name ? NameToId(name) : JSID_VOID;
types::HeapTypeSetKey property = object->property(id);
types::HeapTypeSetKey property = key->property(id);
if (!property.maybeTypes() || property.couldBeConstant(constraints))
return false;
@ -4723,21 +4724,21 @@ TryAddTypeBarrierForWrite(TempAllocator &alloc, types::CompilerConstraintList *c
}
static MInstruction *
AddTypeGuard(TempAllocator &alloc, MBasicBlock *current, MDefinition *obj,
types::TypeObjectKey *type, bool bailOnEquality)
AddGroupGuard(TempAllocator &alloc, MBasicBlock *current, MDefinition *obj,
types::ObjectGroupKey *key, bool bailOnEquality)
{
MInstruction *guard;
if (type->isTypeObject()) {
guard = MGuardObjectType::New(alloc, obj, type->asTypeObject(), bailOnEquality,
Bailout_ObjectIdentityOrTypeGuard);
if (key->isGroup()) {
guard = MGuardObjectGroup::New(alloc, obj, key->group(), bailOnEquality,
Bailout_ObjectIdentityOrTypeGuard);
} else {
guard = MGuardObjectIdentity::New(alloc, obj, type->asSingleObject(), bailOnEquality);
guard = MGuardObjectIdentity::New(alloc, obj, key->singleton(), bailOnEquality);
}
current->add(guard);
// For now, never move type object guards.
// For now, never move object group / identity guards.
guard->setNotMovable();
return guard;
@ -4777,17 +4778,17 @@ jit::PropertyWriteNeedsTypeBarrier(TempAllocator &alloc, types::CompilerConstrai
bool success = true;
for (size_t i = 0; i < types->getObjectCount(); i++) {
types::TypeObjectKey *object = types->getObject(i);
if (!object || object->unknownProperties())
types::ObjectGroupKey *key = types->getObject(i);
if (!key || key->unknownProperties())
continue;
// TI doesn't track TypedArray objects and should never insert a type
// TI doesn't track TypedArray indexes and should never insert a type
// barrier for them.
if (!name && IsAnyTypedArrayClass(object->clasp()))
if (!name && IsAnyTypedArrayClass(key->clasp()))
continue;
jsid id = name ? NameToId(name) : JSID_VOID;
types::HeapTypeSetKey property = object->property(id);
types::HeapTypeSetKey property = key->property(id);
if (!CanWriteProperty(alloc, constraints, property, *pvalue, implicitType)) {
// Either pobj or pvalue needs to be modified to filter out the
// types which the value could have but are not in the property,
@ -4811,26 +4812,26 @@ jit::PropertyWriteNeedsTypeBarrier(TempAllocator &alloc, types::CompilerConstrai
if (types->getObjectCount() <= 1)
return true;
types::TypeObjectKey *excluded = nullptr;
types::ObjectGroupKey *excluded = nullptr;
for (size_t i = 0; i < types->getObjectCount(); i++) {
types::TypeObjectKey *object = types->getObject(i);
if (!object || object->unknownProperties())
types::ObjectGroupKey *key = types->getObject(i);
if (!key || key->unknownProperties())
continue;
if (!name && IsAnyTypedArrayClass(object->clasp()))
if (!name && IsAnyTypedArrayClass(key->clasp()))
continue;
jsid id = name ? NameToId(name) : JSID_VOID;
types::HeapTypeSetKey property = object->property(id);
types::HeapTypeSetKey property = key->property(id);
if (CanWriteProperty(alloc, constraints, property, *pvalue, implicitType))
continue;
if ((property.maybeTypes() && !property.maybeTypes()->empty()) || excluded)
return true;
excluded = object;
excluded = key;
}
MOZ_ASSERT(excluded);
*pobj = AddTypeGuard(alloc, current, *pobj, excluded, /* bailOnEquality = */ true);
*pobj = AddGroupGuard(alloc, current, *pobj, excluded, /* bailOnEquality = */ true);
return false;
}

Просмотреть файл

@ -2692,7 +2692,7 @@ class MNewArray
{
ArrayObject *obj = templateObject();
setResultType(MIRType_Object);
if (!obj->hasSingletonType())
if (!obj->isSingleton())
setResultTypeSet(MakeSingletonTypeSet(constraints, obj));
}
@ -2754,7 +2754,7 @@ class MNewArrayCopyOnWrite : public MNullaryInstruction
: templateObject_(templateObject),
initialHeap_(initialHeap)
{
MOZ_ASSERT(!templateObject->hasSingletonType());
MOZ_ASSERT(!templateObject->isSingleton());
setResultType(MIRType_Object);
setResultTypeSet(MakeSingletonTypeSet(constraints, templateObject));
}
@ -2798,7 +2798,7 @@ class MNewArrayDynamicLength
{
setGuard(); // Need to throw if length is negative.
setResultType(MIRType_Object);
if (!templateObject->hasSingletonType())
if (!templateObject->isSingleton())
setResultTypeSet(MakeSingletonTypeSet(constraints, templateObject));
}
@ -2847,7 +2847,7 @@ class MNewObject
PlainObject *obj = templateObject();
MOZ_ASSERT_IF(mode != ObjectLiteral, !shouldUseVM());
setResultType(MIRType_Object);
if (!obj->hasSingletonType())
if (!obj->isSingleton())
setResultTypeSet(MakeSingletonTypeSet(constraints, obj));
// The constant is kept separated in a MConstant, this way we can safely
@ -6313,8 +6313,8 @@ class MStringSplit
JSObject *templateObject() const {
return &getOperand(2)->toConstant()->value().toObject();
}
types::TypeObject *typeObject() const {
return templateObject()->type();
types::ObjectGroup *group() const {
return templateObject()->group();
}
bool possiblyCalls() const MOZ_OVERRIDE {
return true;
@ -7152,22 +7152,22 @@ struct LambdaFunctionInfo
uint16_t flags;
gc::Cell *scriptOrLazyScript;
bool singletonType;
bool useNewTypeForClone;
bool useSingletonForClone;
explicit LambdaFunctionInfo(JSFunction *fun)
: fun(fun), flags(fun->flags()),
scriptOrLazyScript(fun->hasScript()
? (gc::Cell *) fun->nonLazyScript()
: (gc::Cell *) fun->lazyScript()),
singletonType(fun->hasSingletonType()),
useNewTypeForClone(types::UseNewTypeForClone(fun))
singletonType(fun->isSingleton()),
useSingletonForClone(types::UseSingletonForClone(fun))
{}
LambdaFunctionInfo(const LambdaFunctionInfo &info)
: fun((JSFunction *) info.fun), flags(info.flags),
scriptOrLazyScript(info.scriptOrLazyScript),
singletonType(info.singletonType),
useNewTypeForClone(info.useNewTypeForClone)
useSingletonForClone(info.useSingletonForClone)
{}
};
@ -7181,7 +7181,7 @@ class MLambda
: MBinaryInstruction(scopeChain, cst), info_(&cst->value().toObject().as<JSFunction>())
{
setResultType(MIRType_Object);
if (!info().fun->hasSingletonType() && !types::UseNewTypeForClone(info().fun))
if (!info().fun->isSingleton() && !types::UseSingletonForClone(info().fun))
setResultTypeSet(MakeSingletonTypeSet(constraints, info().fun));
}
@ -7219,8 +7219,8 @@ class MLambdaArrow
: MBinaryInstruction(scopeChain, this_), info_(fun)
{
setResultType(MIRType_Object);
MOZ_ASSERT(!types::UseNewTypeForClone(fun));
if (!fun->hasSingletonType())
MOZ_ASSERT(!types::UseSingletonForClone(fun));
if (!fun->isSingleton())
setResultTypeSet(MakeSingletonTypeSet(constraints, fun));
}
@ -9195,11 +9195,11 @@ typedef Vector<bool, 4, JitAllocPolicy> BoolVector;
class InlinePropertyTable : public TempObject
{
struct Entry : public TempObject {
AlwaysTenured<types::TypeObject *> typeObj;
AlwaysTenured<types::ObjectGroup *> group;
AlwaysTenuredFunction func;
Entry(types::TypeObject *typeObj, JSFunction *func)
: typeObj(typeObj), func(func)
Entry(types::ObjectGroup *group, JSFunction *func)
: group(group), func(func)
{ }
};
@ -9226,17 +9226,17 @@ class InlinePropertyTable : public TempObject
return pc_;
}
bool addEntry(TempAllocator &alloc, types::TypeObject *typeObj, JSFunction *func) {
return entries_.append(new(alloc) Entry(typeObj, func));
bool addEntry(TempAllocator &alloc, types::ObjectGroup *group, JSFunction *func) {
return entries_.append(new(alloc) Entry(group, func));
}
size_t numEntries() const {
return entries_.length();
}
types::TypeObject *getTypeObject(size_t i) const {
types::ObjectGroup *getObjectGroup(size_t i) const {
MOZ_ASSERT(i < numEntries());
return entries_[i]->typeObj;
return entries_[i]->group;
}
JSFunction *getFunction(size_t i) const {
@ -9359,8 +9359,8 @@ class MGetPropertyCache
bool updateForReplacement(MDefinition *ins) MOZ_OVERRIDE;
};
// Emit code to load a value from an object if its shape/type matches one of
// the shapes/types observed by the baseline IC, else bails out.
// Emit code to load a value from an object if its shape/group matches one of
// the shapes/groups observed by the baseline IC, else bails out.
class MGetPropertyPolymorphic
: public MUnaryInstruction,
public SingleObjectPolicy::Data
@ -9374,13 +9374,13 @@ class MGetPropertyPolymorphic
};
Vector<Entry, 4, JitAllocPolicy> nativeShapes_;
Vector<types::TypeObject *, 4, JitAllocPolicy> unboxedTypes_;
Vector<types::ObjectGroup *, 4, JitAllocPolicy> unboxedGroups_;
AlwaysTenuredPropertyName name_;
MGetPropertyPolymorphic(TempAllocator &alloc, MDefinition *obj, PropertyName *name)
: MUnaryInstruction(obj),
nativeShapes_(alloc),
unboxedTypes_(alloc),
unboxedGroups_(alloc),
name_(name)
{
setGuard();
@ -9409,8 +9409,8 @@ class MGetPropertyPolymorphic
entry.shape = shape;
return nativeShapes_.append(entry);
}
bool addUnboxedType(types::TypeObject *type) {
return unboxedTypes_.append(type);
bool addUnboxedGroup(types::ObjectGroup *group) {
return unboxedGroups_.append(group);
}
size_t numShapes() const {
return nativeShapes_.length();
@ -9421,11 +9421,11 @@ class MGetPropertyPolymorphic
Shape *shape(size_t i) const {
return nativeShapes_[i].shape;
}
size_t numUnboxedTypes() const {
return unboxedTypes_.length();
size_t numUnboxedGroups() const {
return unboxedGroups_.length();
}
types::TypeObject *unboxedType(size_t i) const {
return unboxedTypes_[i];
types::ObjectGroup *unboxedGroup(size_t i) const {
return unboxedGroups_[i];
}
PropertyName *name() const {
return name_;
@ -9435,14 +9435,14 @@ class MGetPropertyPolymorphic
}
AliasSet getAliasSet() const MOZ_OVERRIDE {
return AliasSet::Load(AliasSet::ObjectFields | AliasSet::FixedSlot | AliasSet::DynamicSlot |
(unboxedTypes_.empty() ? 0 : (AliasSet::TypedArrayElement | AliasSet::Element)));
(unboxedGroups_.empty() ? 0 : (AliasSet::TypedArrayElement | AliasSet::Element)));
}
bool mightAlias(const MDefinition *store) const MOZ_OVERRIDE;
};
// Emit code to store a value to an object's slots if its shape matches
// one of the shapes observed by the baseline IC, else bails out.
// Emit code to store a value to an object's slots if its shape/group matches
// one of the shapes/groups observed by the baseline IC, else bails out.
class MSetPropertyPolymorphic
: public MBinaryInstruction,
public MixPolicy<SingleObjectPolicy, NoFloatPolicy<1> >::Data
@ -9456,7 +9456,7 @@ class MSetPropertyPolymorphic
};
Vector<Entry, 4, JitAllocPolicy> nativeShapes_;
Vector<types::TypeObject *, 4, JitAllocPolicy> unboxedTypes_;
Vector<types::ObjectGroup *, 4, JitAllocPolicy> unboxedGroups_;
AlwaysTenuredPropertyName name_;
bool needsBarrier_;
@ -9464,7 +9464,7 @@ class MSetPropertyPolymorphic
PropertyName *name)
: MBinaryInstruction(obj, value),
nativeShapes_(alloc),
unboxedTypes_(alloc),
unboxedGroups_(alloc),
name_(name),
needsBarrier_(false)
{
@ -9484,8 +9484,8 @@ class MSetPropertyPolymorphic
entry.shape = shape;
return nativeShapes_.append(entry);
}
bool addUnboxedType(types::TypeObject *type) {
return unboxedTypes_.append(type);
bool addUnboxedGroup(types::ObjectGroup *group) {
return unboxedGroups_.append(group);
}
size_t numShapes() const {
return nativeShapes_.length();
@ -9496,11 +9496,11 @@ class MSetPropertyPolymorphic
Shape *shape(size_t i) const {
return nativeShapes_[i].shape;
}
size_t numUnboxedTypes() const {
return unboxedTypes_.length();
size_t numUnboxedGroups() const {
return unboxedGroups_.length();
}
types::TypeObject *unboxedType(size_t i) const {
return unboxedTypes_[i];
types::ObjectGroup *unboxedGroup(size_t i) const {
return unboxedGroups_[i];
}
PropertyName *name() const {
return name_;
@ -9519,7 +9519,7 @@ class MSetPropertyPolymorphic
}
AliasSet getAliasSet() const MOZ_OVERRIDE {
return AliasSet::Store(AliasSet::ObjectFields | AliasSet::FixedSlot | AliasSet::DynamicSlot |
(unboxedTypes_.empty() ? 0 : (AliasSet::TypedArrayElement | AliasSet::Element)));
(unboxedGroups_.empty() ? 0 : (AliasSet::TypedArrayElement | AliasSet::Element)));
}
};
@ -9530,14 +9530,14 @@ class MDispatchInstruction
// Map from JSFunction* -> MBasicBlock.
struct Entry {
JSFunction *func;
// If |func| has a singleton type, |funcType| is null. Otherwise,
// |funcType| holds the TypeObject for |func|, and dispatch guards
// on the type instead of directly on the function.
types::TypeObject *funcType;
// If |func| has a singleton group, |funcGroup| is null. Otherwise,
// |funcGroup| holds the ObjectGroup for |func|, and dispatch guards
// on the group instead of directly on the function.
types::ObjectGroup *funcGroup;
MBasicBlock *block;
Entry(JSFunction *func, types::TypeObject *funcType, MBasicBlock *block)
: func(func), funcType(funcType), block(block)
Entry(JSFunction *func, types::ObjectGroup *funcGroup, MBasicBlock *block)
: func(func), funcGroup(funcGroup), block(block)
{ }
};
Vector<Entry, 4, JitAllocPolicy> map_;
@ -9605,8 +9605,8 @@ class MDispatchInstruction
}
public:
void addCase(JSFunction *func, types::TypeObject *funcType, MBasicBlock *block) {
map_.append(Entry(func, funcType, block));
void addCase(JSFunction *func, types::ObjectGroup *funcGroup, MBasicBlock *block) {
map_.append(Entry(func, funcGroup, block));
}
uint32_t numCases() const {
return map_.length();
@ -9614,8 +9614,8 @@ class MDispatchInstruction
JSFunction *getCase(uint32_t i) const {
return map_[i].func;
}
types::TypeObject *getCaseTypeObject(uint32_t i) const {
return map_[i].funcType;
types::ObjectGroup *getCaseObjectGroup(uint32_t i) const {
return map_[i].funcGroup;
}
MBasicBlock *getCaseBlock(uint32_t i) const {
return map_[i].block;
@ -9639,24 +9639,24 @@ class MDispatchInstruction
}
};
// Polymorphic dispatch for inlining, keyed off incoming TypeObject.
class MTypeObjectDispatch : public MDispatchInstruction
// Polymorphic dispatch for inlining, keyed off incoming ObjectGroup.
class MObjectGroupDispatch : public MDispatchInstruction
{
// Map TypeObject (of CallProp's Target Object) -> JSFunction (yielded by the CallProp).
// Map ObjectGroup (of CallProp's Target Object) -> JSFunction (yielded by the CallProp).
InlinePropertyTable *inlinePropertyTable_;
MTypeObjectDispatch(TempAllocator &alloc, MDefinition *input, InlinePropertyTable *table)
MObjectGroupDispatch(TempAllocator &alloc, MDefinition *input, InlinePropertyTable *table)
: MDispatchInstruction(alloc, input),
inlinePropertyTable_(table)
{ }
public:
INSTRUCTION_HEADER(TypeObjectDispatch)
INSTRUCTION_HEADER(ObjectGroupDispatch)
static MTypeObjectDispatch *New(TempAllocator &alloc, MDefinition *ins,
InlinePropertyTable *table)
static MObjectGroupDispatch *New(TempAllocator &alloc, MDefinition *ins,
InlinePropertyTable *table)
{
return new(alloc) MTypeObjectDispatch(alloc, ins, table);
return new(alloc) MObjectGroupDispatch(alloc, ins, table);
}
InlinePropertyTable *propTable() const {
@ -9847,19 +9847,19 @@ class MGuardShapePolymorphic
}
};
// Guard on an object's type, inclusively or exclusively.
class MGuardObjectType
// Guard on an object's group, inclusively or exclusively.
class MGuardObjectGroup
: public MUnaryInstruction,
public SingleObjectPolicy::Data
{
AlwaysTenured<types::TypeObject *> typeObject_;
AlwaysTenured<types::ObjectGroup *> group_;
bool bailOnEquality_;
BailoutKind bailoutKind_;
MGuardObjectType(MDefinition *obj, types::TypeObject *typeObject, bool bailOnEquality,
BailoutKind bailoutKind)
MGuardObjectGroup(MDefinition *obj, types::ObjectGroup *group, bool bailOnEquality,
BailoutKind bailoutKind)
: MUnaryInstruction(obj),
typeObject_(typeObject),
group_(group),
bailOnEquality_(bailOnEquality),
bailoutKind_(bailoutKind)
{
@ -9869,18 +9869,18 @@ class MGuardObjectType
}
public:
INSTRUCTION_HEADER(GuardObjectType)
INSTRUCTION_HEADER(GuardObjectGroup)
static MGuardObjectType *New(TempAllocator &alloc, MDefinition *obj, types::TypeObject *typeObject,
bool bailOnEquality, BailoutKind bailoutKind) {
return new(alloc) MGuardObjectType(obj, typeObject, bailOnEquality, bailoutKind);
static MGuardObjectGroup *New(TempAllocator &alloc, MDefinition *obj, types::ObjectGroup *group,
bool bailOnEquality, BailoutKind bailoutKind) {
return new(alloc) MGuardObjectGroup(obj, group, bailOnEquality, bailoutKind);
}
MDefinition *obj() const {
return getOperand(0);
}
const types::TypeObject *typeObject() const {
return typeObject_;
const types::ObjectGroup *group() const {
return group_;
}
bool bailOnEquality() const {
return bailOnEquality_;
@ -9889,13 +9889,13 @@ class MGuardObjectType
return bailoutKind_;
}
bool congruentTo(const MDefinition *ins) const MOZ_OVERRIDE {
if (!ins->isGuardObjectType())
if (!ins->isGuardObjectGroup())
return false;
if (typeObject() != ins->toGuardObjectType()->typeObject())
if (group() != ins->toGuardObjectGroup()->group())
return false;
if (bailOnEquality() != ins->toGuardObjectType()->bailOnEquality())
if (bailOnEquality() != ins->toGuardObjectGroup()->bailOnEquality())
return false;
if (bailoutKind() != ins->toGuardObjectType()->bailoutKind())
if (bailoutKind() != ins->toGuardObjectGroup()->bailoutKind())
return false;
return congruentIfOperandsEqual(ins);
}
@ -12692,7 +12692,7 @@ bool ElementAccessHasExtraIndexedProperty(types::CompilerConstraintList *constra
MIRType DenseNativeElementType(types::CompilerConstraintList *constraints, MDefinition *obj);
BarrierKind PropertyReadNeedsTypeBarrier(JSContext *propertycx,
types::CompilerConstraintList *constraints,
types::TypeObjectKey *object, PropertyName *name,
types::ObjectGroupKey *object, PropertyName *name,
types::TemporaryTypeSet *observed, bool updateObserved);
BarrierKind PropertyReadNeedsTypeBarrier(JSContext *propertycx,
types::CompilerConstraintList *constraints,

Просмотреть файл

@ -154,12 +154,12 @@ class MIRGenerator
return modifiesFrameArguments_;
}
typedef Vector<types::TypeObject *, 0, JitAllocPolicy> TypeObjectVector;
typedef Vector<types::ObjectGroup *, 0, JitAllocPolicy> ObjectGroupVector;
// When abortReason() == AbortReason_NewScriptProperties, all types which
// the new script properties analysis hasn't been performed on yet.
const TypeObjectVector &abortedNewScriptPropertiesTypes() const {
return abortedNewScriptPropertiesTypes_;
const ObjectGroupVector &abortedNewScriptPropertiesGroups() const {
return abortedNewScriptPropertiesGroups_;
}
public:
@ -174,7 +174,7 @@ class MIRGenerator
MIRGraph *graph_;
AbortReason abortReason_;
bool shouldForceAbort_; // Force AbortReason_Disable
TypeObjectVector abortedNewScriptPropertiesTypes_;
ObjectGroupVector abortedNewScriptPropertiesGroups_;
bool error_;
mozilla::Atomic<bool, mozilla::Relaxed> *pauseBuild_;
mozilla::Atomic<bool, mozilla::Relaxed> cancelBuild_;
@ -199,7 +199,7 @@ class MIRGenerator
// CodeGenerator::link).
ObjectVector nurseryObjects_;
void addAbortedNewScriptPropertiesType(types::TypeObject *type);
void addAbortedNewScriptPropertiesGroup(types::ObjectGroup *type);
void setForceAbort() {
shouldForceAbort_ = true;
}

Просмотреть файл

@ -27,7 +27,7 @@ MIRGenerator::MIRGenerator(CompileCompartment *compartment, const JitCompileOpti
graph_(graph),
abortReason_(AbortReason_NoAbort),
shouldForceAbort_(false),
abortedNewScriptPropertiesTypes_(*alloc_),
abortedNewScriptPropertiesGroups_(*alloc_),
error_(false),
pauseBuild_(nullptr),
cancelBuild_(false),
@ -93,14 +93,14 @@ MIRGenerator::abort(const char *message, ...)
}
void
MIRGenerator::addAbortedNewScriptPropertiesType(types::TypeObject *type)
MIRGenerator::addAbortedNewScriptPropertiesGroup(types::ObjectGroup *group)
{
for (size_t i = 0; i < abortedNewScriptPropertiesTypes_.length(); i++) {
if (type == abortedNewScriptPropertiesTypes_[i])
for (size_t i = 0; i < abortedNewScriptPropertiesGroups_.length(); i++) {
if (group == abortedNewScriptPropertiesGroups_[i])
return;
}
if (!abortedNewScriptPropertiesTypes_.append(type))
CrashAtUnhandlableOOM("addAbortedNewScriptPropertiesType");
if (!abortedNewScriptPropertiesGroups_.append(group))
CrashAtUnhandlableOOM("addAbortedNewScriptPropertiesGroup");
}
void

Просмотреть файл

@ -39,7 +39,7 @@ namespace jit {
_(Goto) \
_(Test) \
_(GotoWithFake) \
_(TypeObjectDispatch) \
_(ObjectGroupDispatch) \
_(FunctionDispatch) \
_(Compare) \
_(Phi) \
@ -160,7 +160,7 @@ namespace jit {
_(BindNameCache) \
_(GuardShape) \
_(GuardShapePolymorphic) \
_(GuardObjectType) \
_(GuardObjectGroup) \
_(GuardObjectIdentity) \
_(GuardClass) \
_(ArrayLength) \

Просмотреть файл

@ -55,14 +55,14 @@ class TypeWrapper {
return 0;
return 1;
}
inline JSObject *getSingleObjectNoBarrier(unsigned) const {
if (t_.isSingleObject())
return t_.singleObjectNoBarrier();
inline JSObject *getSingletonNoBarrier(unsigned) const {
if (t_.isSingleton())
return t_.singletonNoBarrier();
return nullptr;
}
inline types::TypeObject *getTypeObjectNoBarrier(unsigned) const {
if (t_.isTypeObject())
return t_.typeObjectNoBarrier();
inline types::ObjectGroup *getGroupNoBarrier(unsigned) const {
if (t_.isGroup())
return t_.groupNoBarrier();
return nullptr;
}
};
@ -144,9 +144,9 @@ MacroAssembler::guardTypeSet(const Source &address, const TypeSet *types, Barrie
// properties become unknown, so check for this case.
if (obj == scratch)
extractObject(address, scratch);
loadPtr(Address(obj, JSObject::offsetOfType()), scratch);
loadPtr(Address(obj, JSObject::offsetOfGroup()), scratch);
branchTestPtr(Assembler::NonZero,
Address(scratch, types::TypeObject::offsetOfFlags()),
Address(scratch, types::ObjectGroup::offsetOfFlags()),
Imm32(types::OBJECT_FLAG_UNKNOWN_PROPERTIES), &matched);
assumeUnreachable("Unexpected object type");
@ -175,22 +175,22 @@ MacroAssembler::guardObjectType(Register obj, const TypeSet *types,
BranchGCPtr lastBranch;
MOZ_ASSERT(!lastBranch.isInitialized());
bool hasTypeObjects = false;
bool hasObjectGroups = false;
unsigned count = types->getObjectCount();
for (unsigned i = 0; i < count; i++) {
if (!types->getSingleObjectNoBarrier(i)) {
hasTypeObjects = hasTypeObjects || types->getTypeObjectNoBarrier(i);
if (!types->getSingletonNoBarrier(i)) {
hasObjectGroups = hasObjectGroups || types->getGroupNoBarrier(i);
continue;
}
if (lastBranch.isInitialized())
lastBranch.emit(*this);
JSObject *object = types->getSingleObjectNoBarrier(i);
JSObject *object = types->getSingletonNoBarrier(i);
lastBranch = BranchGCPtr(Equal, obj, ImmGCPtr(object), &matched);
}
if (hasTypeObjects) {
if (hasObjectGroups) {
// We are possibly going to overwrite the obj register. So already
// emit the branch, since branch depends on previous value of obj
// register and there is definitely a branch following. So no need
@ -201,17 +201,17 @@ MacroAssembler::guardObjectType(Register obj, const TypeSet *types,
// Note: Some platforms give the same register for obj and scratch.
// Make sure when writing to scratch, the obj register isn't used anymore!
loadPtr(Address(obj, JSObject::offsetOfType()), scratch);
loadPtr(Address(obj, JSObject::offsetOfGroup()), scratch);
for (unsigned i = 0; i < count; i++) {
if (!types->getTypeObjectNoBarrier(i))
if (!types->getGroupNoBarrier(i))
continue;
if (lastBranch.isInitialized())
lastBranch.emit(*this);
types::TypeObject *object = types->getTypeObjectNoBarrier(i);
lastBranch = BranchGCPtr(Equal, scratch, ImmGCPtr(object), &matched);
types::ObjectGroup *group = types->getGroupNoBarrier(i);
lastBranch = BranchGCPtr(Equal, scratch, ImmGCPtr(group), &matched);
}
}
@ -1192,7 +1192,7 @@ MacroAssembler::initGCThing(Register obj, Register slots, JSObject *templateObj,
// Fast initialization of an empty object returned by allocateObject().
storePtr(ImmGCPtr(templateObj->lastProperty()), Address(obj, JSObject::offsetOfShape()));
storePtr(ImmGCPtr(templateObj->type()), Address(obj, JSObject::offsetOfType()));
storePtr(ImmGCPtr(templateObj->group()), Address(obj, JSObject::offsetOfGroup()));
if (templateObj->isNative()) {
NativeObject *ntemplate = &templateObj->as<NativeObject>();

Просмотреть файл

@ -289,13 +289,13 @@ class MacroAssembler : public MacroAssemblerSpecific
loadPtr(Address(dest, Shape::offsetOfBase()), dest);
}
void loadObjClass(Register objReg, Register dest) {
loadPtr(Address(objReg, JSObject::offsetOfType()), dest);
loadPtr(Address(dest, types::TypeObject::offsetOfClasp()), dest);
loadPtr(Address(objReg, JSObject::offsetOfGroup()), dest);
loadPtr(Address(dest, types::ObjectGroup::offsetOfClasp()), dest);
}
void branchTestObjClass(Condition cond, Register obj, Register scratch, const js::Class *clasp,
Label *label) {
loadPtr(Address(obj, JSObject::offsetOfType()), scratch);
branchPtr(cond, Address(scratch, types::TypeObject::offsetOfClasp()), ImmPtr(clasp), label);
loadPtr(Address(obj, JSObject::offsetOfGroup()), scratch);
branchPtr(cond, Address(scratch, types::ObjectGroup::offsetOfClasp()), ImmPtr(clasp), label);
}
void branchTestObjShape(Condition cond, Register obj, const Shape *shape, Label *label) {
branchPtr(cond, Address(obj, JSObject::offsetOfShape()), ImmGCPtr(shape), label);
@ -347,8 +347,8 @@ class MacroAssembler : public MacroAssemblerSpecific
}
void loadObjProto(Register obj, Register dest) {
loadPtr(Address(obj, JSObject::offsetOfType()), dest);
loadPtr(Address(dest, types::TypeObject::offsetOfProto()), dest);
loadPtr(Address(obj, JSObject::offsetOfGroup()), dest);
loadPtr(Address(dest, types::ObjectGroup::offsetOfProto()), dest);
}
void loadStringLength(Register str, Register dest) {

Просмотреть файл

@ -199,7 +199,7 @@ static inline HashNumber
HashType(types::Type ty)
{
if (ty.isObjectUnchecked())
return PointerHasher<types::TypeObjectKey *, 3>::hash(ty.objectKey());
return PointerHasher<types::ObjectGroupKey *, 3>::hash(ty.objectKey());
return HashNumber(ty.raw());
}

Просмотреть файл

@ -3218,6 +3218,8 @@ RangeAnalysis::prepareForUCE(bool *shouldRemoveDeadCode)
{
*shouldRemoveDeadCode = false;
MDefinitionVector deadConditions(alloc());
for (ReversePostorderIterator iter(graph_.rpoBegin()); iter != graph_.rpoEnd(); iter++) {
MBasicBlock *block = *iter;
@ -3232,6 +3234,7 @@ RangeAnalysis::prepareForUCE(bool *shouldRemoveDeadCode)
// chosen based which of the successors has the unreachable flag which is
// added by MBeta::computeRange on its own block.
MTest *test = cond->toTest();
MDefinition *condition = test->input();
MConstant *constant = nullptr;
if (block == test->ifTrue()) {
constant = MConstant::New(alloc(), BooleanValue(false));
@ -3239,7 +3242,15 @@ RangeAnalysis::prepareForUCE(bool *shouldRemoveDeadCode)
MOZ_ASSERT(block == test->ifFalse());
constant = MConstant::New(alloc(), BooleanValue(true));
}
if (DeadIfUnused(condition) && !condition->isInWorklist()) {
condition->setInWorklist();
if (!deadConditions.append(condition))
return false;
}
test->block()->insertBefore(test, constant);
test->replaceOperand(0, constant);
JitSpew(JitSpew_Range, "Update condition of %d to reflect unreachable branches.",
test->id());
@ -3247,5 +3258,55 @@ RangeAnalysis::prepareForUCE(bool *shouldRemoveDeadCode)
*shouldRemoveDeadCode = true;
}
// Flag all fallible instructions which were indirectly used in the
// computation of the condition, such that we do not ignore
// bailout-paths which are used to shrink the input range of the
// operands of the condition.
for (size_t i = 0; i < deadConditions.length(); i++) {
MDefinition *cond = deadConditions[i];
// If this instruction is a guard, then there is not need to continue on
// this instruction.
if (cond->isGuard())
continue;
if (cond->range()) {
// Filter the range of the instruction based on its MIRType.
Range typeFilteredRange(cond);
// If the filtered range is updated by adding the original range,
// then the MIRType act as an effectful filter. As we do not know if
// this filtered Range might change or not the result of the
// previous comparison, we have to keep this instruction as a guard
// because it has to bailout in order to restrict the Range to its
// MIRType.
if (typeFilteredRange.update(cond->range())) {
cond->setGuard();
continue;
}
}
for (size_t op = 0, e = cond->numOperands(); op < e; op++) {
MDefinition *operand = cond->getOperand(op);
if (!DeadIfUnused(operand) || operand->isInWorklist())
continue;
// If the operand has no range, then its range is always infered
// from its MIRType, so it cannot be used change the result deduced
// by Range Analysis.
if (!operand->range())
continue;
operand->setInWorklist();
if (!deadConditions.append(operand))
return false;
}
}
while (!deadConditions.empty()) {
MDefinition *cond = deadConditions.popCopy();
cond->setNotInWorklist();
}
return true;
}

Просмотреть файл

@ -971,10 +971,10 @@ RStringSplit::recover(JSContext *cx, SnapshotIterator &iter) const
{
RootedString str(cx, iter.read().toString());
RootedString sep(cx, iter.read().toString());
RootedTypeObject typeObj(cx, iter.read().toObject().type());
RootedObjectGroup group(cx, iter.read().toObject().group());
RootedValue result(cx);
JSObject *res = str_split_string(cx, typeObj, str, sep);
JSObject *res = str_split_string(cx, group, str, sep);
if (!res)
return false;
@ -1211,13 +1211,13 @@ RNewArray::recover(JSContext *cx, SnapshotIterator &iter) const
{
RootedObject templateObject(cx, &iter.read().toObject());
RootedValue result(cx);
RootedTypeObject type(cx);
RootedObjectGroup group(cx);
// See CodeGenerator::visitNewArrayCallVM
if (!templateObject->hasSingletonType())
type = templateObject->type();
if (!templateObject->isSingleton())
group = templateObject->group();
JSObject *resultObject = NewDenseArray(cx, count_, type, allocatingBehaviour_);
JSObject *resultObject = NewDenseArray(cx, count_, group, allocatingBehaviour_);
if (!resultObject)
return false;

Просмотреть файл

@ -287,16 +287,16 @@ template bool StringsEqual<false>(JSContext *cx, HandleString lhs, HandleString
JSObject*
NewInitObject(JSContext *cx, HandlePlainObject templateObject)
{
NewObjectKind newKind = templateObject->hasSingletonType() ? SingletonObject : GenericObject;
if (!templateObject->hasLazyType() && templateObject->type()->shouldPreTenure())
NewObjectKind newKind = templateObject->isSingleton() ? SingletonObject : GenericObject;
if (!templateObject->hasLazyGroup() && templateObject->group()->shouldPreTenure())
newKind = TenuredObject;
RootedObject obj(cx, CopyInitializerObject(cx, templateObject, newKind));
if (!obj)
return nullptr;
if (!templateObject->hasSingletonType())
obj->setType(templateObject->type());
if (!templateObject->isSingleton())
obj->setGroup(templateObject->group());
return obj;
}
@ -519,9 +519,9 @@ MallocWrapper(JSRuntime *rt, size_t nbytes)
}
JSObject *
NewCallObject(JSContext *cx, HandleShape shape, HandleTypeObject type, uint32_t lexicalBegin)
NewCallObject(JSContext *cx, HandleShape shape, HandleObjectGroup group, uint32_t lexicalBegin)
{
JSObject *obj = CallObject::create(cx, shape, type, lexicalBegin);
JSObject *obj = CallObject::create(cx, shape, group, lexicalBegin);
if (!obj)
return nullptr;
@ -913,7 +913,7 @@ InitRestParameter(JSContext *cx, uint32_t length, Value *rest, HandleObject temp
Rooted<ArrayObject*> arrRes(cx, &objRes->as<ArrayObject>());
MOZ_ASSERT(!arrRes->getDenseInitializedLength());
MOZ_ASSERT(arrRes->type() == templateObj->type());
MOZ_ASSERT(arrRes->group() == templateObj->group());
// Fast path: we managed to allocate the array inline; initialize the
// slots.
@ -927,12 +927,12 @@ InitRestParameter(JSContext *cx, uint32_t length, Value *rest, HandleObject temp
return arrRes;
}
NewObjectKind newKind = templateObj->type()->shouldPreTenure()
NewObjectKind newKind = templateObj->group()->shouldPreTenure()
? TenuredObject
: GenericObject;
ArrayObject *arrRes = NewDenseCopiedArray(cx, length, rest, nullptr, newKind);
if (arrRes)
arrRes->setType(templateObj->type());
arrRes->setGroup(templateObj->group());
return arrRes;
}
@ -1177,8 +1177,8 @@ AssertValidObjectPtr(JSContext *cx, JSObject *obj)
MOZ_ASSERT(obj->compartment() == cx->compartment());
MOZ_ASSERT(obj->runtimeFromMainThread() == cx->runtime());
MOZ_ASSERT_IF(!obj->hasLazyType(),
obj->type()->clasp() == obj->lastProperty()->getObjectClass());
MOZ_ASSERT_IF(!obj->hasLazyGroup(),
obj->group()->clasp() == obj->lastProperty()->getObjectClass());
if (obj->isTenured()) {
MOZ_ASSERT(obj->isAligned());
@ -1288,9 +1288,9 @@ MarkShapeFromIon(JSRuntime *rt, Shape **shapep)
}
void
MarkTypeObjectFromIon(JSRuntime *rt, types::TypeObject **typep)
MarkObjectGroupFromIon(JSRuntime *rt, types::ObjectGroup **groupp)
{
gc::MarkTypeObjectUnbarriered(&rt->gc.marker, typep, "write barrier");
gc::MarkObjectGroupUnbarriered(&rt->gc.marker, groupp, "write barrier");
}
bool

Просмотреть файл

@ -344,8 +344,8 @@ template <> struct TypeToArgProperties<MutableHandleValue> {
template <> struct TypeToArgProperties<HandleShape> {
static const uint32_t result = TypeToArgProperties<Shape *>::result | VMFunction::ByRef;
};
template <> struct TypeToArgProperties<HandleTypeObject> {
static const uint32_t result = TypeToArgProperties<types::TypeObject *>::result | VMFunction::ByRef;
template <> struct TypeToArgProperties<HandleObjectGroup> {
static const uint32_t result = TypeToArgProperties<types::ObjectGroup *>::result | VMFunction::ByRef;
};
// Convert argument type to whether or not it should be passed in a float
@ -382,7 +382,7 @@ template <> struct TypeToRootType<MutableHandleValue> {
template <> struct TypeToRootType<HandleShape> {
static const uint32_t result = VMFunction::RootCell;
};
template <> struct TypeToRootType<HandleTypeObject> {
template <> struct TypeToRootType<HandleObjectGroup> {
static const uint32_t result = VMFunction::RootCell;
};
template <> struct TypeToRootType<HandleScript> {
@ -674,7 +674,7 @@ bool SetProperty(JSContext *cx, HandleObject obj, HandlePropertyName name, Handl
bool InterruptCheck(JSContext *cx);
void *MallocWrapper(JSRuntime *rt, size_t nbytes);
JSObject *NewCallObject(JSContext *cx, HandleShape shape, HandleTypeObject type,
JSObject *NewCallObject(JSContext *cx, HandleShape shape, HandleObjectGroup group,
uint32_t lexicalBegin);
JSObject *NewSingletonCallObject(JSContext *cx, HandleShape shape, uint32_t lexicalBegin);
JSObject *NewStringObject(JSContext *cx, HandleString str);
@ -759,7 +759,7 @@ void MarkValueFromIon(JSRuntime *rt, Value *vp);
void MarkStringFromIon(JSRuntime *rt, JSString **stringp);
void MarkObjectFromIon(JSRuntime *rt, JSObject **objp);
void MarkShapeFromIon(JSRuntime *rt, Shape **shapep);
void MarkTypeObjectFromIon(JSRuntime *rt, types::TypeObject **typep);
void MarkObjectGroupFromIon(JSRuntime *rt, types::ObjectGroup **groupp);
// Helper for generatePreBarrier.
inline void *
@ -774,8 +774,8 @@ IonMarkFunction(MIRType type)
return JS_FUNC_TO_DATA_PTR(void *, MarkObjectFromIon);
case MIRType_Shape:
return JS_FUNC_TO_DATA_PTR(void *, MarkShapeFromIon);
case MIRType_TypeObject:
return JS_FUNC_TO_DATA_PTR(void *, MarkTypeObjectFromIon);
case MIRType_ObjectGroup:
return JS_FUNC_TO_DATA_PTR(void *, MarkObjectGroupFromIon);
default: MOZ_CRASH();
}
}

Просмотреть файл

@ -1654,13 +1654,13 @@ CodeGeneratorARM::visitGuardShape(LGuardShape *guard)
}
void
CodeGeneratorARM::visitGuardObjectType(LGuardObjectType *guard)
CodeGeneratorARM::visitGuardObjectGroup(LGuardObjectGroup *guard)
{
Register obj = ToRegister(guard->input());
Register tmp = ToRegister(guard->tempInt());
masm.ma_ldr(DTRAddr(obj, DtrOffImm(JSObject::offsetOfType())), tmp);
masm.ma_cmp(tmp, ImmGCPtr(guard->mir()->typeObject()));
masm.ma_ldr(DTRAddr(obj, DtrOffImm(JSObject::offsetOfGroup())), tmp);
masm.ma_cmp(tmp, ImmGCPtr(guard->mir()->group()));
Assembler::Condition cond =
guard->mir()->bailOnEquality() ? Assembler::Equal : Assembler::NotEqual;

Просмотреть файл

@ -195,7 +195,7 @@ class CodeGeneratorARM : public CodeGeneratorShared
void visitFloat32(LFloat32 *ins);
void visitGuardShape(LGuardShape *guard);
void visitGuardObjectType(LGuardObjectType *guard);
void visitGuardObjectGroup(LGuardObjectGroup *guard);
void visitGuardClass(LGuardClass *guard);
void visitNegI(LNegI *lir);

Просмотреть файл

@ -380,17 +380,17 @@ class LGuardShape : public LInstructionHelper<0, 1, 1>
}
};
class LGuardObjectType : public LInstructionHelper<0, 1, 1>
class LGuardObjectGroup : public LInstructionHelper<0, 1, 1>
{
public:
LIR_HEADER(GuardObjectType);
LIR_HEADER(GuardObjectGroup);
LGuardObjectType(const LAllocation &in, const LDefinition &temp) {
LGuardObjectGroup(const LAllocation &in, const LDefinition &temp) {
setOperand(0, in);
setTemp(0, temp);
}
const MGuardObjectType *mir() const {
return mir_->toGuardObjectType();
const MGuardObjectGroup *mir() const {
return mir_->toGuardObjectGroup();
}
const LDefinition *tempInt() {
return getTemp(0);

Просмотреть файл

@ -387,12 +387,12 @@ LIRGeneratorARM::visitGuardShape(MGuardShape *ins)
}
void
LIRGeneratorARM::visitGuardObjectType(MGuardObjectType *ins)
LIRGeneratorARM::visitGuardObjectGroup(MGuardObjectGroup *ins)
{
MOZ_ASSERT(ins->obj()->type() == MIRType_Object);
LDefinition tempObj = temp(LDefinition::OBJECT);
LGuardObjectType *guard = new(alloc()) LGuardObjectType(useRegister(ins->obj()), tempObj);
LGuardObjectGroup *guard = new(alloc()) LGuardObjectGroup(useRegister(ins->obj()), tempObj);
assignSnapshot(guard, ins->bailoutKind());
add(guard, ins);
redefine(ins, ins->obj());

Просмотреть файл

@ -91,7 +91,7 @@ class LIRGeneratorARM : public LIRGeneratorShared
void visitReturn(MReturn *ret);
void lowerPhi(MPhi *phi);
void visitGuardShape(MGuardShape *ins);
void visitGuardObjectType(MGuardObjectType *ins);
void visitGuardObjectGroup(MGuardObjectGroup *ins);
void visitAsmJSUnsignedToDouble(MAsmJSUnsignedToDouble *ins);
void visitAsmJSUnsignedToFloat32(MAsmJSUnsignedToFloat32 *ins);
void visitAsmJSLoadHeap(MAsmJSLoadHeap *ins);

Просмотреть файл

@ -1744,16 +1744,16 @@ CodeGeneratorMIPS::visitGuardShape(LGuardShape *guard)
}
void
CodeGeneratorMIPS::visitGuardObjectType(LGuardObjectType *guard)
CodeGeneratorMIPS::visitGuardObjectGroup(LGuardObjectGroup *guard)
{
Register obj = ToRegister(guard->input());
Register tmp = ToRegister(guard->tempInt());
masm.loadPtr(Address(obj, JSObject::offsetOfType()), tmp);
masm.loadPtr(Address(obj, JSObject::offsetOfGroup()), tmp);
Assembler::Condition cond = guard->mir()->bailOnEquality()
? Assembler::Equal
: Assembler::NotEqual;
bailoutCmpPtr(cond, tmp, ImmGCPtr(guard->mir()->typeObject()), guard->snapshot());
bailoutCmpPtr(cond, tmp, ImmGCPtr(guard->mir()->group()), guard->snapshot());
}
void

Просмотреть файл

@ -244,7 +244,7 @@ class CodeGeneratorMIPS : public CodeGeneratorShared
void visitFloat32(LFloat32 *ins);
void visitGuardShape(LGuardShape *guard);
void visitGuardObjectType(LGuardObjectType *guard);
void visitGuardObjectGroup(LGuardObjectGroup *guard);
void visitGuardClass(LGuardClass *guard);
void visitNegI(LNegI *lir);

Просмотреть файл

@ -328,17 +328,17 @@ class LGuardShape : public LInstructionHelper<0, 1, 1>
}
};
class LGuardObjectType : public LInstructionHelper<0, 1, 1>
class LGuardObjectGroup : public LInstructionHelper<0, 1, 1>
{
public:
LIR_HEADER(GuardObjectType);
LIR_HEADER(GuardObjectGroup);
LGuardObjectType(const LAllocation &in, const LDefinition &temp) {
LGuardObjectGroup(const LAllocation &in, const LDefinition &temp) {
setOperand(0, in);
setTemp(0, temp);
}
const MGuardObjectType *mir() const {
return mir_->toGuardObjectType();
const MGuardObjectGroup *mir() const {
return mir_->toGuardObjectGroup();
}
const LDefinition *tempInt() {
return getTemp(0);

Просмотреть файл

@ -378,12 +378,12 @@ LIRGeneratorMIPS::visitGuardShape(MGuardShape *ins)
}
void
LIRGeneratorMIPS::visitGuardObjectType(MGuardObjectType *ins)
LIRGeneratorMIPS::visitGuardObjectGroup(MGuardObjectGroup *ins)
{
MOZ_ASSERT(ins->obj()->type() == MIRType_Object);
LDefinition tempObj = temp(LDefinition::OBJECT);
LGuardObjectType *guard = new(alloc()) LGuardObjectType(useRegister(ins->obj()), tempObj);
LGuardObjectGroup *guard = new(alloc()) LGuardObjectGroup(useRegister(ins->obj()), tempObj);
assignSnapshot(guard, ins->bailoutKind());
add(guard, ins);
redefine(ins, ins->obj());

Просмотреть файл

@ -91,7 +91,7 @@ class LIRGeneratorMIPS : public LIRGeneratorShared
void visitReturn(MReturn *ret);
void lowerPhi(MPhi *phi);
void visitGuardShape(MGuardShape *ins);
void visitGuardObjectType(MGuardObjectType *ins);
void visitGuardObjectGroup(MGuardObjectGroup *ins);
void visitAsmJSUnsignedToDouble(MAsmJSUnsignedToDouble *ins);
void visitAsmJSUnsignedToFloat32(MAsmJSUnsignedToFloat32 *ins);
void visitAsmJSLoadHeap(MAsmJSLoadHeap *ins);

Просмотреть файл

@ -44,7 +44,7 @@ class LTableSwitchV : public LInstruction
};
class LGuardShape : public LInstruction {};
class LGuardObjectType : public LInstruction {};
class LGuardObjectGroup : public LInstruction {};
class LMulI : public LInstruction {};
} // namespace jit

Просмотреть файл

@ -70,7 +70,7 @@ class LIRGeneratorNone : public LIRGeneratorShared
void visitPowHalf(MPowHalf *) { MOZ_CRASH(); }
void visitAsmJSNeg(MAsmJSNeg *) { MOZ_CRASH(); }
void visitGuardShape(MGuardShape *ins) { MOZ_CRASH(); }
void visitGuardObjectType(MGuardObjectType *ins) { MOZ_CRASH(); }
void visitGuardObjectGroup(MGuardObjectGroup *ins) { MOZ_CRASH(); }
void visitAsmJSUnsignedToDouble(MAsmJSUnsignedToDouble *ins) { MOZ_CRASH(); }
void visitAsmJSUnsignedToFloat32(MAsmJSUnsignedToFloat32 *ins) { MOZ_CRASH(); }
void visitAsmJSLoadHeap(MAsmJSLoadHeap *ins) { MOZ_CRASH(); }

Просмотреть файл

@ -1978,10 +1978,10 @@ CodeGeneratorX86Shared::visitGuardShape(LGuardShape *guard)
}
void
CodeGeneratorX86Shared::visitGuardObjectType(LGuardObjectType *guard)
CodeGeneratorX86Shared::visitGuardObjectGroup(LGuardObjectGroup *guard)
{
Register obj = ToRegister(guard->input());
masm.cmpPtr(Operand(obj, JSObject::offsetOfType()), ImmGCPtr(guard->mir()->typeObject()));
masm.cmpPtr(Operand(obj, JSObject::offsetOfGroup()), ImmGCPtr(guard->mir()->group()));
Assembler::Condition cond =
guard->mir()->bailOnEquality() ? Assembler::Equal : Assembler::NotEqual;
@ -1994,8 +1994,8 @@ CodeGeneratorX86Shared::visitGuardClass(LGuardClass *guard)
Register obj = ToRegister(guard->input());
Register tmp = ToRegister(guard->tempInt());
masm.loadPtr(Address(obj, JSObject::offsetOfType()), tmp);
masm.cmpPtr(Operand(tmp, types::TypeObject::offsetOfClasp()), ImmPtr(guard->mir()->getClass()));
masm.loadPtr(Address(obj, JSObject::offsetOfGroup()), tmp);
masm.cmpPtr(Operand(tmp, types::ObjectGroup::offsetOfClasp()), ImmPtr(guard->mir()->getClass()));
bailoutIf(Assembler::NotEqual, guard->snapshot());
}

Просмотреть файл

@ -198,7 +198,7 @@ class CodeGeneratorX86Shared : public CodeGeneratorShared
virtual void visitRound(LRound *lir);
virtual void visitRoundF(LRoundF *lir);
virtual void visitGuardShape(LGuardShape *guard);
virtual void visitGuardObjectType(LGuardObjectType *guard);
virtual void visitGuardObjectGroup(LGuardObjectGroup *guard);
virtual void visitGuardClass(LGuardClass *guard);
virtual void visitEffectiveAddress(LEffectiveAddress *ins);
virtual void visitUDivOrMod(LUDivOrMod *ins);

Просмотреть файл

@ -282,16 +282,16 @@ class LGuardShape : public LInstructionHelper<0, 1, 0>
}
};
class LGuardObjectType : public LInstructionHelper<0, 1, 0>
class LGuardObjectGroup : public LInstructionHelper<0, 1, 0>
{
public:
LIR_HEADER(GuardObjectType)
LIR_HEADER(GuardObjectGroup)
explicit LGuardObjectType(const LAllocation &in) {
explicit LGuardObjectGroup(const LAllocation &in) {
setOperand(0, in);
}
const MGuardObjectType *mir() const {
return mir_->toGuardObjectType();
const MGuardObjectGroup *mir() const {
return mir_->toGuardObjectGroup();
}
};

Просмотреть файл

@ -44,11 +44,11 @@ LIRGeneratorX86Shared::visitGuardShape(MGuardShape *ins)
}
void
LIRGeneratorX86Shared::visitGuardObjectType(MGuardObjectType *ins)
LIRGeneratorX86Shared::visitGuardObjectGroup(MGuardObjectGroup *ins)
{
MOZ_ASSERT(ins->obj()->type() == MIRType_Object);
LGuardObjectType *guard = new(alloc()) LGuardObjectType(useRegisterAtStart(ins->obj()));
LGuardObjectGroup *guard = new(alloc()) LGuardObjectGroup(useRegisterAtStart(ins->obj()));
assignSnapshot(guard, ins->bailoutKind());
add(guard, ins);
redefine(ins, ins->obj());

Просмотреть файл

@ -24,7 +24,7 @@ class LIRGeneratorX86Shared : public LIRGeneratorShared
LTableSwitchV *newLTableSwitchV(MTableSwitch *ins);
void visitGuardShape(MGuardShape *ins);
void visitGuardObjectType(MGuardObjectType *ins);
void visitGuardObjectGroup(MGuardObjectGroup *ins);
void visitPowHalf(MPowHalf *ins);
void lowerForShift(LInstructionHelper<1, 2, 0> *ins, MDefinition *mir, MDefinition *lhs,
MDefinition *rhs);

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше