Merge mozilla-inbound to mozilla-central. a=merge

Andreea Pavel 2019-05-14 07:10:36 +03:00
Parents 8eca20e3ff a2a47acbcd
Commit 4bf5812d4c
62 changed files with 1310 additions and 956 deletions

View file

@ -35,7 +35,7 @@
"chrome_settings_overrides": {
"url": "chrome://browser/content/parent/ext-chrome-settings-overrides.js",
"scopes": [],
"events": ["update", "uninstall"],
"events": ["update", "uninstall", "disable"],
"schema": "chrome://browser/content/schemas/chrome_settings_overrides.json",
"manifest": ["chrome_settings_overrides"]
},
@ -175,6 +175,7 @@
"url": "chrome://browser/content/parent/ext-sidebarAction.js",
"schema": "chrome://browser/content/schemas/sidebar_action.json",
"scopes": ["addon_parent"],
"events": ["uninstall"],
"manifest": ["sidebar_action"],
"paths": [
["sidebarAction"]
@ -193,7 +194,7 @@
"url": "chrome://browser/content/parent/ext-url-overrides.js",
"schema": "chrome://browser/content/schemas/url_overrides.json",
"scopes": ["addon_parent"],
"events": ["uninstall"],
"events": ["update", "uninstall", "disable"],
"manifest": ["chrome_url_overrides"],
"paths": [
["urlOverrides"]

View file

@ -115,7 +115,7 @@ this.browserAction = class extends ExtensionAPI {
}
}
onShutdown(reason) {
onShutdown() {
browserActionMap.delete(this.extension);
this.tabContext.shutdown();

View file

@ -175,6 +175,13 @@ this.chrome_settings_overrides = class extends ExtensionAPI {
}
}
static onDisable(id) {
homepagePopup.clearConfirmation(id);
chrome_settings_overrides.processDefaultSearchSetting("disable", id);
chrome_settings_overrides.removeEngine(id);
}
async onManifestEntry(entryName) {
let {extension} = this;
let {manifest} = extension;
@ -193,8 +200,7 @@ this.chrome_settings_overrides = class extends ExtensionAPI {
let item = await ExtensionPreferencesManager.getSetting("homepage_override");
inControl = item && item.id == extension.id;
}
// We need to add the listener here too since onPrefsChanged won't trigger on a
// restart (the prefs are already set).
if (inControl) {
Services.prefs.setBoolPref(HOMEPAGE_PRIVATE_ALLOWED, extension.privateBrowsingAllowed);
// Also set this now as an upgraded browser will need this.
@ -225,14 +231,6 @@ this.chrome_settings_overrides = class extends ExtensionAPI {
}
}
});
extension.callOnClose({
close: () => {
if (extension.shutdownReason == "ADDON_DISABLE") {
homepagePopup.clearConfirmation(extension.id);
}
},
});
}
if (manifest.chrome_settings_overrides.search_provider) {
// Registering a search engine can potentially take a long while,
@ -261,14 +259,6 @@ this.chrome_settings_overrides = class extends ExtensionAPI {
return;
}
}
extension.callOnClose({
close: () => {
if (extension.shutdownReason == "ADDON_DISABLE") {
chrome_settings_overrides.processDefaultSearchSetting("disable", extension.id);
chrome_settings_overrides.removeEngine(extension.id);
}
},
});
let engineName = searchProvider.name.trim();
if (searchProvider.is_default) {

View file

@ -20,7 +20,7 @@ this.commands = class extends ExtensionAPI {
await shortcuts.register();
}
onShutdown(reason) {
onShutdown() {
this.extension.shortcuts.unregister();
}

View file

@ -356,7 +356,7 @@ this.devtools = class extends ExtensionAPI {
DevToolsShim.on("toolbox-destroy", this.onToolboxDestroy);
}
onShutdown(reason) {
onShutdown() {
DevToolsShim.off("toolbox-created", this.onToolboxCreated);
DevToolsShim.off("toolbox-destroy", this.onToolboxDestroy);

View file

@ -1100,7 +1100,7 @@ this.menusInternal = class extends ExtensionAPI {
gMenuMap.set(extension, new Map());
}
onShutdown(reason) {
onShutdown() {
let {extension} = this;
if (gMenuMap.has(extension)) {

View file

@ -20,7 +20,7 @@ this.omnibox = class extends ExtensionAPI {
}
}
onShutdown(reason) {
onShutdown() {
ExtensionSearchHandler.unregisterKeyword(this.keyword);
}

View file

@ -118,7 +118,7 @@ this.pageAction = class extends ExtensionAPI {
}
}
onShutdown(reason) {
onShutdown(isAppShutdown) {
pageActionMap.delete(this.extension);
this.tabContext.shutdown();
@ -127,7 +127,7 @@ this.pageAction = class extends ExtensionAPI {
// across app restarts, so don't remove it on app shutdown, but do remove
// it on all other shutdowns since there's no guarantee the action will be
// coming back.
if (reason != "APP_SHUTDOWN" && this.browserPageAction) {
if (!isAppShutdown && this.browserPageAction) {
this.browserPageAction.remove();
this.browserPageAction = null;
}
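
Across these modules, onShutdown now receives a boolean isAppShutdown instead of the old shutdown-reason string; the caller is assumed to pass something like extension.shutdownReason == "APP_SHUTDOWN". A minimal sketch of the pattern (class and field names are hypothetical):

this.exampleAction = class extends ExtensionAPI {
  onShutdown(isAppShutdown) {
    if (isAppShutdown) {
      // On application shutdown, leave the UI in place so session restore
      // can bring it back and shutdown stays cheap.
      return;
    }
    // Disable, uninstall, or update: the widget may not come back, remove it.
    if (this.widget) {
      this.widget.remove();
      this.widget = null;
    }
  }
};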

View file

@ -88,14 +88,14 @@ this.sidebarAction = class extends ExtensionAPI {
this.build();
}
onShutdown(reason) {
onShutdown(isAppShutdown) {
sidebarActionMap.delete(this.extension);
this.tabContext.shutdown();
// Don't remove everything on app shutdown so session restore can handle
// restoring open sidebars.
if (reason === "APP_SHUTDOWN") {
if (isAppShutdown) {
return;
}
@ -104,10 +104,6 @@ this.sidebarAction = class extends ExtensionAPI {
if (SidebarUI.currentID === this.id) {
SidebarUI.hide();
}
if (SidebarUI.lastOpenedId === this.id &&
reason === "ADDON_UNINSTALL") {
SidebarUI.lastOpenedId = null;
}
let menu = document.getElementById(this.menuId);
if (menu) {
menu.remove();
@ -124,6 +120,16 @@ this.sidebarAction = class extends ExtensionAPI {
windowTracker.removeCloseListener(this.windowCloseListener);
}
static onUninstall(id) {
const sidebarId = `${makeWidgetId(id)}-sidebar-action`;
for (let window of windowTracker.browserWindows()) {
let {SidebarUI} = window;
if (SidebarUI.lastOpenedId === sidebarId) {
SidebarUI.lastOpenedId = null;
}
}
}
build() {
this.tabContext.on("tab-select", // eslint-disable-line mozilla/balanced-listeners
(evt, tab) => { this.updateWindow(tab.ownerGlobal); });

View file

@ -93,14 +93,32 @@ ExtensionParent.apiManager.on("extension-setting-changed", async (eventName, set
});
this.urlOverrides = class extends ExtensionAPI {
static onUninstall(id) {
// TODO: This can be removed once bug 1438364 is fixed and all data is cleaned up.
static async onDisable(id) {
newTabPopup.clearConfirmation(id);
await ExtensionSettingsStore.initialize();
if (ExtensionSettingsStore.hasSetting(id, STORE_TYPE, NEW_TAB_SETTING_NAME)) {
ExtensionSettingsStore.disable(id, STORE_TYPE, NEW_TAB_SETTING_NAME);
}
}
processNewTabSetting(action) {
let {extension} = this;
ExtensionSettingsStore[action](extension.id, STORE_TYPE, NEW_TAB_SETTING_NAME);
static async onUninstall(id) {
// TODO: This can be removed once bug 1438364 is fixed and all data is cleaned up.
newTabPopup.clearConfirmation(id);
await ExtensionSettingsStore.initialize();
if (ExtensionSettingsStore.hasSetting(id, STORE_TYPE, NEW_TAB_SETTING_NAME)) {
ExtensionSettingsStore.removeSetting(id, STORE_TYPE, NEW_TAB_SETTING_NAME);
}
}
static async onUpdate(id, manifest) {
if (!manifest.chrome_url_overrides ||
!manifest.chrome_url_overrides.newtab) {
await ExtensionSettingsStore.initialize();
if (ExtensionSettingsStore.hasSetting(id, STORE_TYPE, NEW_TAB_SETTING_NAME)) {
ExtensionSettingsStore.removeSetting(id, STORE_TYPE, NEW_TAB_SETTING_NAME);
}
}
}
async onManifestEntry(entryName) {
@ -110,40 +128,12 @@ this.urlOverrides = class extends ExtensionAPI {
await ExtensionSettingsStore.initialize();
if (manifest.chrome_url_overrides.newtab) {
// Set up the shutdown code for the setting.
extension.callOnClose({
close: () => {
switch (extension.shutdownReason) {
case "ADDON_DISABLE":
this.processNewTabSetting("disable");
newTabPopup.clearConfirmation(extension.id);
break;
// We can remove the setting on upgrade or downgrade because it will be
// added back in when the manifest is re-read. This will cover the case
// where a new version of an add-on removes the manifest key.
case "ADDON_DOWNGRADE":
case "ADDON_UPGRADE":
case "ADDON_UNINSTALL":
this.processNewTabSetting("removeSetting");
break;
}
},
});
let url = extension.baseURI.resolve(manifest.chrome_url_overrides.newtab);
let item = await ExtensionSettingsStore.addSetting(
extension.id, STORE_TYPE, NEW_TAB_SETTING_NAME, url,
() => aboutNewTabService.newTabURL);
// If the extension was just re-enabled, change the setting to enabled.
// This is required because addSetting above is used for both add and update.
if (["ADDON_ENABLE", "ADDON_UPGRADE", "ADDON_DOWNGRADE"]
.includes(extension.startupReason)) {
item = ExtensionSettingsStore.enable(extension.id, STORE_TYPE, NEW_TAB_SETTING_NAME);
}
// Set the newTabURL to the current value of the setting.
if (item) {
setNewTabURL(item.id, item.value || item.initialValue);
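
The per-instance callOnClose handlers removed above are replaced by the static onDisable/onUpdate/onUninstall hooks, which operate purely on the add-on id via ExtensionSettingsStore. A minimal standalone sketch of that store pattern; the helper name is hypothetical.

// Hypothetical helper illustrating the pattern used by the static hooks:
// initialize the store once, check for the newtab setting, then either
// disable it (add-on disabled) or remove it entirely (uninstall, or an
// update that drops the chrome_url_overrides.newtab key).
async function dropNewTabOverride(id, removeEntirely) {
  await ExtensionSettingsStore.initialize();
  if (ExtensionSettingsStore.hasSetting(id, STORE_TYPE, NEW_TAB_SETTING_NAME)) {
    if (removeEntirely) {
      ExtensionSettingsStore.removeSetting(id, STORE_TYPE, NEW_TAB_SETTING_NAME);
    } else {
      ExtensionSettingsStore.disable(id, STORE_TYPE, NEW_TAB_SETTING_NAME);
    }
  }
}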

View file

@ -130,8 +130,8 @@ this.formautofill = class extends ExtensionAPI {
Services.mm.loadFrameScript("chrome://formautofill/content/FormAutofillFrameScript.js", true, true);
}
onShutdown(reason) {
if (reason == "APP_SHUTDOWN") {
onShutdown(isAppShutdown) {
if (isAppShutdown) {
return;
}

View file

@ -16,8 +16,8 @@ XPCOMUtils.defineLazyGetter(this, "l10nStrings", function() {
let l10nManifest;
this.l10n = class extends ExtensionAPI {
onShutdown(reason) {
if (reason !== "APP_SHUTDOWN" && l10nManifest) {
onShutdown(isAppShutdown) {
if (!isAppShutdown && l10nManifest) {
Components.manager.removeBootstrappedManifestLocation(l10nManifest);
}
}

View file

@ -138,6 +138,10 @@ const proto = {
g.ownPropertyLength = getArrayLength(this.obj);
} else if (isStorage(g)) {
g.ownPropertyLength = getStorageLength(this.obj);
} else if (isReplaying) {
// When replaying we can get the number of properties directly, to avoid
// needing to enumerate all of them.
g.ownPropertyLength = this.obj.getOwnPropertyNamesCount();
} else {
try {
g.ownPropertyLength = this.obj.getOwnPropertyNames().length;
@ -335,6 +339,13 @@ const proto = {
return safeGetterValues;
}
// Do not search for safe getters while replaying. While this would be nice
// to support, it involves a lot of back-and-forth between processes and
// would be better done entirely in the replaying process.
if (isReplaying) {
return safeGetterValues;
}
// Most objects don't have any safe getters but inherit some from their
// prototype. Avoid calling getOwnPropertyNames on objects that may have
// many properties like Array, strings or js objects. That to avoid

View file

@ -377,6 +377,10 @@ function GenericObject(objectActor, grip, rawObj, specialStringBehavior = false)
for (let j = 0; j < rawObj.length; j++) {
names.push(rawObj.key(j));
}
} else if (isReplaying) {
// When replaying we can access a batch of properties for use in generating
// the preview. This avoids needing to enumerate all properties.
names = obj.getEnumerableOwnPropertyNamesForPreview();
} else {
names = obj.getOwnPropertyNames();
}
@ -781,6 +785,12 @@ previewers.Object = [
// - The array indices are consecutive.
// - The value of "length", if present, is the number of array indices.
// Don't generate pseudo array previews when replaying. We don't want to
// have to enumerate all the properties in order to determine this.
if (isReplaying) {
return false;
}
let keys;
try {
keys = obj.getOwnPropertyNames();

View file

@ -194,6 +194,7 @@ ReplayDebugger.prototype = {
if (this._paused) {
// If we resume and immediately pause, we are at an endpoint of the
// recording. Force the thread to pause.
this._capturePauseData();
this.replayingOnForcedPause(this.getNewestFrame());
}
});
@ -209,6 +210,7 @@ ReplayDebugger.prototype = {
// timeWarp() doesn't return until the child has reached the target of
// the warp, after which we force the thread to pause.
assert(this._paused);
this._capturePauseData();
this.replayingOnForcedPause(this.getNewestFrame());
});
},
@ -352,6 +354,48 @@ ReplayDebugger.prototype = {
this._objects.length = 0;
},
// Fill in the debugger with (hopefully) all data the client/server need to
// pause at the current location.
_capturePauseData() {
if (this._frames.length) {
return;
}
const pauseData = this._sendRequestAllowDiverge({ type: "pauseData" });
if (!pauseData.frames) {
return;
}
for (const data of Object.values(pauseData.scripts)) {
this._addScript(data);
}
for (const { scriptId, offset, metadata} of pauseData.offsetMetadata) {
if (this._scripts[scriptId]) {
const script = this._getScript(scriptId);
script._addOffsetMetadata(offset, metadata);
}
}
for (const { data, preview } of Object.values(pauseData.objects)) {
if (!this._objects[data.id]) {
this._addObject(data);
}
this._getObject(data.id)._preview = preview;
}
for (const { data, names } of Object.values(pauseData.environments)) {
if (!this._objects[data.id]) {
this._addObject(data);
}
this._getObject(data.id)._names = names;
}
for (const frame of pauseData.frames) {
this._frames[frame.index] = new ReplayDebuggerFrame(this, frame);
}
},
/////////////////////////////////////////////////////////
// Search management
/////////////////////////////////////////////////////////
@ -556,20 +600,24 @@ ReplayDebugger.prototype = {
_getObject(id) {
if (id && !this._objects[id]) {
const data = this._sendRequest({ type: "getObject", id });
switch (data.kind) {
case "Object":
this._objects[id] = new ReplayDebuggerObject(this, data);
break;
case "Environment":
this._objects[id] = new ReplayDebuggerEnvironment(this, data);
break;
default:
ThrowError("Unknown object kind");
}
this._addObject(data);
}
return this._objects[id];
},
_addObject(data) {
switch (data.kind) {
case "Object":
this._objects[data.id] = new ReplayDebuggerObject(this, data);
break;
case "Environment":
this._objects[data.id] = new ReplayDebuggerEnvironment(this, data);
break;
default:
ThrowError("Unknown object kind");
}
},
// Convert a value we received from the child.
_convertValue(value) {
if (isNonNullObject(value)) {
@ -694,8 +742,10 @@ ReplayDebugger.prototype = {
set replayingOnPopFrame(handler) {
if (handler) {
this._setBreakpoint(() => { handler.call(this, this.getNewestFrame()); },
{ kind: "OnPop" }, handler);
this._setBreakpoint(() => {
this._capturePauseData();
handler.call(this, this.getNewestFrame());
}, { kind: "OnPop" }, handler);
} else {
this._clearMatchingBreakpoints(({position}) => {
return position.kind == "OnPop" && !position.script;
@ -727,6 +777,7 @@ ReplayDebugger.prototype = {
function ReplayDebuggerScript(dbg, data) {
this._dbg = dbg;
this._data = data;
this._offsetMetadata = [];
}
ReplayDebuggerScript.prototype = {
@ -749,7 +800,6 @@ ReplayDebuggerScript.prototype = {
getSuccessorOffsets(pc) { return this._forward("getSuccessorOffsets", pc); },
getPredecessorOffsets(pc) { return this._forward("getPredecessorOffsets", pc); },
getAllColumnOffsets() { return this._forward("getAllColumnOffsets"); },
getOffsetMetadata(pc) { return this._forward("getOffsetMetadata", pc); },
getPossibleBreakpoints(query) {
return this._forward("getPossibleBreakpoints", query);
},
@ -757,10 +807,22 @@ ReplayDebuggerScript.prototype = {
return this._forward("getPossibleBreakpointOffsets", query);
},
getOffsetMetadata(pc) {
if (!this._offsetMetadata[pc]) {
this._addOffsetMetadata(pc, this._forward("getOffsetMetadata", pc));
}
return this._offsetMetadata[pc];
},
_addOffsetMetadata(pc, metadata) {
this._offsetMetadata[pc] = metadata;
},
setBreakpoint(offset, handler) {
this._dbg._setBreakpoint(() => { handler.hit(this._dbg.getNewestFrame()); },
{ kind: "Break", script: this._data.id, offset },
handler);
this._dbg._setBreakpoint(() => {
this._dbg._capturePauseData();
handler.hit(this._dbg.getNewestFrame());
}, { kind: "Break", script: this._data.id, offset }, handler);
},
clearBreakpoint(handler) {
@ -867,13 +929,15 @@ ReplayDebuggerFrame.prototype = {
setReplayingOnStep(handler, offsets) {
offsets.forEach(offset => {
this._dbg._setBreakpoint(
() => { handler.call(this._dbg.getNewestFrame()); },
{ kind: "OnStep",
script: this._data.script,
offset,
frameIndex: this._data.index },
handler);
this._dbg._setBreakpoint(() => {
this._dbg._capturePauseData();
handler.call(this._dbg.getNewestFrame());
}, {
kind: "OnStep",
script: this._data.script,
offset,
frameIndex: this._data.index,
}, handler);
});
},
@ -886,6 +950,7 @@ ReplayDebuggerFrame.prototype = {
set onPop(handler) {
if (handler) {
this._dbg._setBreakpoint(() => {
this._dbg._capturePauseData();
const result = this._dbg._sendRequest({ type: "popFrameResult" });
handler.call(this._dbg.getNewestFrame(),
this._dbg._convertCompletionValue(result));
@ -917,15 +982,15 @@ ReplayDebuggerFrame.prototype = {
function ReplayDebuggerObject(dbg, data) {
this._dbg = dbg;
this._data = data;
this._preview = null;
this._properties = null;
this._proxyData = null;
}
ReplayDebuggerObject.prototype = {
_invalidate() {
this._data = null;
this._preview = null;
this._properties = null;
this._proxyData = null;
},
get callable() { return this._data.callable; },
@ -956,28 +1021,56 @@ ReplayDebuggerObject.prototype = {
return Object.keys(this._properties);
},
getEnumerableOwnPropertyNamesForPreview() {
if (this._preview) {
return Object.keys(this._preview.enumerableOwnProperties);
}
return this.getOwnPropertyNames();
},
getOwnPropertyNamesCount() {
if (this._preview) {
return this._preview.ownPropertyNamesCount;
}
return this.getOwnPropertyNames().length;
},
getOwnPropertySymbols() {
// Symbol properties are not handled yet.
return [];
},
getOwnPropertyDescriptor(name) {
if (this._preview) {
if (this._preview.enumerableOwnProperties) {
const desc = this._preview.enumerableOwnProperties[name];
if (desc) {
return this._convertPropertyDescriptor(desc);
}
}
if (name == "length") {
return this._convertPropertyDescriptor(this._preview.lengthProperty);
}
if (name == "displayName") {
return this._convertPropertyDescriptor(this._preview.displayNameProperty);
}
}
this._ensureProperties();
const desc = this._properties[name];
return desc ? this._convertPropertyDescriptor(desc) : undefined;
return this._convertPropertyDescriptor(this._properties[name]);
},
_ensureProperties() {
if (!this._properties) {
const id = this._data.id;
const properties =
this._properties =
this._dbg._sendRequestAllowDiverge({ type: "getObjectProperties", id });
this._properties = Object.create(null);
properties.forEach(({name, desc}) => { this._properties[name] = desc; });
}
},
_convertPropertyDescriptor(desc) {
if (!desc) {
return undefined;
}
const rv = Object.assign({}, desc);
if ("value" in desc) {
rv.value = this._dbg._convertValue(desc.value);
@ -991,35 +1084,19 @@ ReplayDebuggerObject.prototype = {
return rv;
},
_ensureProxyData() {
if (!this._proxyData) {
const data = this._dbg._sendRequestAllowDiverge({
type: "objectProxyData",
id: this._data.id,
});
if (data.exception) {
throw new Error(data.exception);
}
this._proxyData = data;
}
},
unwrap() {
if (!this.isProxy) {
return this;
}
this._ensureProxyData();
return this._dbg._convertValue(this._proxyData.unwrapped);
return this._dbg._convertValue(this._data.proxyUnwrapped);
},
get proxyTarget() {
this._ensureProxyData();
return this._dbg._convertValue(this._proxyData.target);
return this._dbg._convertValue(this._data.proxyTarget);
},
get proxyHandler() {
this._ensureProxyData();
return this._dbg._convertValue(this._proxyData.handler);
return this._dbg._convertValue(this._data.proxyHandler);
},
get boundTargetFunction() {

View file

@ -213,7 +213,7 @@ dbg.onNewScript = function(script) {
// answers to the client about the object's contents, without having to consult
// a child process.
function snapshotObjectProperty({ name, desc }) {
function snapshotObjectProperty([ name, desc ]) {
// Only capture primitive properties in object snapshots.
if ("value" in desc && !convertedValueIsObject(desc.value)) {
return { name, desc };
@ -243,7 +243,7 @@ function makeObjectSnapshot(object) {
isExtensible: object.isExtensible(),
isSealed: object.isSealed(),
isFrozen: object.isFrozen(),
properties: getObjectProperties(object).map(snapshotObjectProperty),
properties: Object.entries(getObjectProperties(object)).map(snapshotObjectProperty),
};
}
@ -624,6 +624,31 @@ function forwardToScript(name) {
return request => gScripts.getObject(request.id)[name](request.value);
}
function getFrameData(index) {
const frame = scriptFrameForIndex(index);
let _arguments = null;
if (frame.arguments) {
_arguments = [];
for (let i = 0; i < frame.arguments.length; i++) {
_arguments.push(convertValue(frame.arguments[i]));
}
}
return {
index,
type: frame.type,
callee: getObjectId(frame.callee),
environment: getObjectId(frame.environment),
generator: frame.generator,
constructing: frame.constructing,
this: convertValue(frame.this),
script: gScripts.getId(frame.script),
offset: frame.offset,
arguments: _arguments,
};
}
function unknownObjectProperties(why) {
return [{
name: "Unknown properties",
@ -634,6 +659,55 @@ function unknownObjectProperties(why) {
}];
}
function getObjectData(id) {
const object = gPausedObjects.getObject(id);
if (object instanceof Debugger.Object) {
const rv = {
id,
kind: "Object",
callable: object.callable,
isBoundFunction: object.isBoundFunction,
isArrowFunction: object.isArrowFunction,
isGeneratorFunction: object.isGeneratorFunction,
isAsyncFunction: object.isAsyncFunction,
proto: getObjectId(object.proto),
class: object.class,
name: object.name,
displayName: object.displayName,
parameterNames: object.parameterNames,
script: gScripts.getId(object.script),
environment: getObjectId(object.environment),
isProxy: object.isProxy,
isExtensible: object.isExtensible(),
isSealed: object.isSealed(),
isFrozen: object.isFrozen(),
};
if (rv.isBoundFunction) {
rv.boundTargetFunction = getObjectId(object.boundTargetFunction);
rv.boundThis = convertValue(object.boundThis);
rv.boundArguments = getObjectId(makeDebuggeeValue(object.boundArguments));
}
if (rv.isProxy) {
rv.proxyUnwrapped = convertValue(object.unwrap());
rv.proxyTarget = convertValue(object.proxyTarget);
rv.proxyHandler = convertValue(object.proxyHandler);
}
return rv;
}
if (object instanceof Debugger.Environment) {
return {
id,
kind: "Environment",
type: object.type,
parent: getObjectId(object.parent),
object: object.type == "declarative" ? 0 : getObjectId(object.object),
callee: getObjectId(object.callee),
optimizedOut: object.optimizedOut,
};
}
throw new Error("Unknown object kind");
}
function getObjectProperties(object) {
let names;
try {
@ -642,12 +716,13 @@ function getObjectProperties(object) {
return unknownObjectProperties(e.toString());
}
return names.map(name => {
const rv = Object.create(null);
names.forEach(name => {
let desc;
try {
desc = object.getOwnPropertyDescriptor(name);
} catch (e) {
return { name, desc: { value: "Unknown: " + e, enumerable: true } };
desc = { name, desc: { value: "Unknown: " + e, enumerable: true } };
}
if ("value" in desc) {
desc.value = convertValue(desc.value);
@ -658,8 +733,22 @@ function getObjectProperties(object) {
if ("set" in desc) {
desc.set = getObjectId(desc.set);
}
return { name, desc };
rv[name] = desc;
});
return rv;
}
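
getObjectProperties now returns a plain object keyed by property name instead of an array of { name, desc } records, so callers iterate it with Object.entries (as snapshotObjectProperty and getPauseData do). A hedged usage sketch; the inspected object is an assumption.

// Usage sketch; "someObject" stands in for a Debugger.Object already known to
// gPausedObjects. Values, getters, and setters in each descriptor have already
// been converted to primitive values or object ids by getObjectProperties.
const props = getObjectProperties(someObject);
for (const [name, desc] of Object.entries(props)) {
  if ("value" in desc) {
    // desc.value is a converted value; desc.get / desc.set are object ids.
  }
}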
function getEnvironmentNames(env) {
try {
const names = env.names();
return names.map(name => {
return { name, value: convertValue(env.getVariable(name)) };
});
} catch (e) {
return [{name: "Unknown names",
value: "Exception thrown in getEnvironmentNames" }];
}
}
function getWindow() {
@ -670,6 +759,164 @@ function getWindow() {
return null;
}
// Maximum number of properties the server is interested in when previewing an
// object.
const OBJECT_PREVIEW_MAX_ITEMS = 10;
// When the replaying process pauses, the server needs to inspect a lot of state
// around frames, objects, etc. in order to fill in all the information the
// client needs to update the UI for the pause location. Done naively, this
// inspection requires a lot of back and forth with the replaying process to
// get all this data. This is bad for performance, and especially so if the
// replaying process is on a different machine from the server. Instead, the
// debugger running in the server can request a pause data packet which includes
// everything the server will need.
//
// The pause data should avoid overapproximating, so that it can be sent quickly
// across a network connection, and especially should not underapproximate,
// since the server would then need to make more requests before the client can
// finish pausing.
function getPauseData() {
const numFrames = countScriptFrames();
if (!numFrames) {
return {};
}
const rv = {
frames: [],
scripts: {},
offsetMetadata: [],
objects: {},
environments: {},
};
function addValue(value, includeProperties) {
if (value && typeof value == "object" && value.object) {
addObject(value.object, includeProperties);
}
}
function addObject(id, includeProperties) {
if (!id) {
return;
}
// If includeProperties is set then previewing the object requires knowledge
// of its enumerable properties.
const needObject = !rv.objects[id];
const needProperties =
includeProperties &&
(needObject || !rv.objects[id].preview.enumerableOwnProperties);
if (!needObject && !needProperties) {
return;
}
const object = gPausedObjects.getObject(id);
assert(object instanceof Debugger.Object);
const properties = getObjectProperties(object);
const propertyEntries = Object.entries(properties);
if (needObject) {
rv.objects[id] = {
data: getObjectData(id),
preview: {
ownPropertyNamesCount: propertyEntries.length,
},
};
const preview = rv.objects[id].preview;
// Add some properties (if present) which the server might ask for
// even when it isn't interested in the rest of the properties.
if (properties.length) {
preview.lengthProperty = properties.length;
}
if (properties.displayName) {
preview.displayNameProperty = properties.displayName;
}
}
if (needProperties) {
const preview = rv.objects[id].preview;
// The server is only interested in enumerable properties, and at most
// OBJECT_PREVIEW_MAX_ITEMS of them. Limiting the properties we send to
// only those the server needs avoids having to send the contents of huge
// objects like Windows, most of which will not be used.
const enumerableOwnProperties = Object.create(null);
let enumerablePropertyCount = 0;
for (const [ name, desc ] of propertyEntries) {
if (desc.enumerable) {
enumerableOwnProperties[name] = desc;
addPropertyDescriptor(desc, false);
if (++enumerablePropertyCount == OBJECT_PREVIEW_MAX_ITEMS) {
break;
}
}
}
preview.enumerableOwnProperties = enumerableOwnProperties;
}
}
function addPropertyDescriptor(desc, includeProperties) {
if (desc.value) {
addValue(desc.value, includeProperties);
}
if (desc.get) {
addObject(desc.get, includeProperties);
}
if (desc.set) {
addObject(desc.set, includeProperties);
}
}
function addEnvironment(id) {
if (!id || rv.environments[id]) {
return;
}
const env = gPausedObjects.getObject(id);
assert(env instanceof Debugger.Environment);
const data = getObjectData(id);
const names = getEnvironmentNames(env);
rv.environments[id] = { data, names };
addEnvironment(data.parent);
}
// eslint-disable-next-line no-shadow
function addScript(id) {
if (!rv.scripts[id]) {
rv.scripts[id] = getScriptData(id);
}
}
for (let i = 0; i < numFrames; i++) {
const frame = getFrameData(i);
const script = gScripts.getObject(frame.script);
rv.frames.push(frame);
rv.offsetMetadata.push({
scriptId: frame.script,
offset: frame.offset,
metadata: script.getOffsetMetadata(frame.offset),
});
addScript(frame.script);
addValue(frame.this, true);
if (frame.arguments) {
for (const arg of frame.arguments) {
addValue(arg, true);
}
}
addObject(frame.callee, false);
addEnvironment(frame.environment, true);
}
return rv;
}
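
For reference, the packet getPauseData builds has roughly the shape sketched below; this is reconstructed from the code above and the field contents are illustrative only.

// Illustrative shape of a pause data packet, as consumed by
// ReplayDebugger._capturePauseData on the server side.
const examplePauseData = {
  frames: [/* getFrameData(i) for each script frame, oldest first */],
  scripts: { /* scriptId: getScriptData(scriptId) */ },
  offsetMetadata: [/* { scriptId, offset, metadata } for each frame's pc */],
  objects: { /* objectId: { data, preview } */ },
  environments: { /* environmentId: { data, names } */ },
};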
///////////////////////////////////////////////////////////////////////////////
// Handlers
///////////////////////////////////////////////////////////////////////////////
@ -733,47 +980,7 @@ const gRequestHandlers = {
},
getObject(request) {
const object = gPausedObjects.getObject(request.id);
if (object instanceof Debugger.Object) {
const rv = {
id: request.id,
kind: "Object",
callable: object.callable,
isBoundFunction: object.isBoundFunction,
isArrowFunction: object.isArrowFunction,
isGeneratorFunction: object.isGeneratorFunction,
isAsyncFunction: object.isAsyncFunction,
proto: getObjectId(object.proto),
class: object.class,
name: object.name,
displayName: object.displayName,
parameterNames: object.parameterNames,
script: gScripts.getId(object.script),
environment: getObjectId(object.environment),
isProxy: object.isProxy,
isExtensible: object.isExtensible(),
isSealed: object.isSealed(),
isFrozen: object.isFrozen(),
};
if (rv.isBoundFunction) {
rv.boundTargetFunction = getObjectId(object.boundTargetFunction);
rv.boundThis = convertValue(object.boundThis);
rv.boundArguments = getObjectId(makeDebuggeeValue(object.boundArguments));
}
return rv;
}
if (object instanceof Debugger.Environment) {
return {
id: request.id,
kind: "Environment",
type: object.type,
parent: getObjectId(object.parent),
object: object.type == "declarative" ? 0 : getObjectId(object.object),
callee: getObjectId(object.callee),
optimizedOut: object.optimizedOut,
};
}
throw new Error("Unknown object kind");
return getObjectData(request.id);
},
getObjectProperties(request) {
@ -785,18 +992,6 @@ const gRequestHandlers = {
return getObjectProperties(object);
},
objectProxyData(request) {
if (!RecordReplayControl.maybeDivergeFromRecording()) {
return { exception: "Recording divergence in unwrapObject" };
}
const obj = gPausedObjects.getObject(request.id);
return {
unwrapped: convertValue(obj.unwrap()),
target: convertValue(obj.proxyTarget),
handler: convertValue(obj.proxyHandler),
};
},
objectApply(request) {
if (!RecordReplayControl.maybeDivergeFromRecording()) {
return { throw: "Recording divergence in objectApply" };
@ -814,22 +1009,14 @@ const gRequestHandlers = {
value: "Recording divergence in getEnvironmentNames" }];
}
try {
const env = gPausedObjects.getObject(request.id);
const names = env.names();
return names.map(name => {
return { name, value: convertValue(env.getVariable(name)) };
});
} catch (e) {
return [{name: "Unknown names",
value: "Exception thrown in getEnvironmentNames" }];
}
const env = gPausedObjects.getObject(request.id);
return getEnvironmentNames(env);
},
getFrame(request) {
if (request.index == -1 /* NewestFrameIndex */) {
const numFrames = countScriptFrames();
if (!numFrames) {
// Return an empty object when there are no frames.
return {};
@ -837,28 +1024,15 @@ const gRequestHandlers = {
request.index = numFrames - 1;
}
const frame = scriptFrameForIndex(request.index);
return getFrameData(request.index);
},
let _arguments = null;
if (frame.arguments) {
_arguments = [];
for (let i = 0; i < frame.arguments.length; i++) {
_arguments.push(convertValue(frame.arguments[i]));
}
pauseData(request) {
if (!RecordReplayControl.maybeDivergeFromRecording()) {
return { error: "Recording divergence in pauseData" };
}
return {
index: request.index,
type: frame.type,
callee: getObjectId(frame.callee),
environment: getObjectId(frame.environment),
generator: frame.generator,
constructing: frame.constructing,
this: convertValue(frame.this),
script: gScripts.getId(frame.script),
offset: frame.offset,
arguments: _arguments,
};
return getPauseData();
},
getLineOffsets: forwardToScript("getLineOffsets"),

View file

@ -0,0 +1,144 @@
// |jit-test| skip-if: !wasmReftypesEnabled()
const {Module,Instance,Global,RuntimeError} = WebAssembly;
const badWasmFunc = /can only pass WebAssembly exported functions to funcref/;
const typeErr = /type mismatch/;
// Validation:
wasmEvalText(`(module (func (local anyref funcref) (local.set 0 (local.get 1))))`);
wasmEvalText(`(module (func (local funcref funcref) (local.set 0 (local.get 1))))`);
wasmEvalText(`(module (func (local funcref) (local.set 0 (ref.null))))`);
wasmFailValidateText(`(module (func (local funcref anyref) (local.set 0 (local.get 1))))`, typeErr);
wasmEvalText(`(module (global (mut funcref) (ref.null)) (func (param funcref) (global.set 0 (local.get 0))))`);
wasmEvalText(`(module (global (mut anyref) (ref.null)) (func (param funcref) (global.set 0 (local.get 0))))`);
wasmFailValidateText(`(module (global (mut funcref) (ref.null)) (func (param anyref) (global.set 0 (local.get 0))))`, typeErr);
wasmEvalText(`(module (func (param funcref)) (func (param funcref) (call 0 (local.get 0))))`);
wasmEvalText(`(module (func (param anyref)) (func (param funcref) (call 0 (local.get 0))))`);
wasmFailValidateText(`(module (func (param funcref)) (func (param anyref) (call 0 (local.get 0))))`, typeErr);
wasmEvalText(`(module (func (param funcref) (result funcref) (block funcref (local.get 0) (br 0))))`);
wasmEvalText(`(module (func (param funcref) (result anyref) (block anyref (local.get 0) (br 0))))`);
wasmFailValidateText(`(module (func (param anyref) (result anyref) (block funcref (local.get 0) (br 0))))`, typeErr);
wasmEvalText(`(module (func (param funcref funcref) (result funcref) (select (local.get 0) (local.get 1) (i32.const 0))))`);
wasmEvalText(`(module (func (param anyref funcref) (result anyref) (select (local.get 0) (local.get 1) (i32.const 0))))`);
wasmEvalText(`(module (func (param funcref anyref) (result anyref) (select (local.get 0) (local.get 1) (i32.const 0))))`);
wasmFailValidateText(`(module (func (param anyref funcref) (result funcref) (select (local.get 0) (local.get 1) (i32.const 0))))`, typeErr);
wasmFailValidateText(`(module (func (param funcref anyref) (result funcref) (select (local.get 0) (local.get 1) (i32.const 0))))`, typeErr);
// Runtime:
var m = new Module(wasmTextToBinary(`(module (func (export "wasmFun")))`));
const wasmFun1 = new Instance(m).exports.wasmFun;
const wasmFun2 = new Instance(m).exports.wasmFun;
const wasmFun3 = new Instance(m).exports.wasmFun;
var run = wasmEvalText(`(module
(global (mut funcref) (ref.null))
(func (param $x funcref) (param $test i32) (result funcref)
local.get $x
global.get 0
local.get $test
select
)
(func (export "run") (param $a funcref) (param $b funcref) (param $c funcref) (param $test1 i32) (param $test2 i32) (result funcref)
local.get $a
global.set 0
block funcref
local.get $b
local.get $test1
br_if 0
drop
local.get $c
end
local.get $test2
call 0
)
)`).exports.run;
assertEq(run(wasmFun1, wasmFun2, wasmFun3, false, false), wasmFun1);
assertEq(run(wasmFun1, wasmFun2, wasmFun3, true, false), wasmFun1);
assertEq(run(wasmFun1, wasmFun2, wasmFun3, true, true), wasmFun2);
assertEq(run(wasmFun1, wasmFun2, wasmFun3, false, true), wasmFun3);
var run = wasmEvalText(`(module
(type $t0 (func (param anyref) (result anyref)))
(type $t1 (func (param funcref) (result anyref)))
(type $t2 (func (param anyref) (result funcref)))
(type $t3 (func (param funcref funcref) (result funcref)))
(func $f0 (type $t0) ref.null)
(func $f1 (type $t1) ref.null)
(func $f2 (type $t2) ref.null)
(func $f3 (type $t3) ref.null)
(table funcref (elem $f0 $f1 $f2 $f3))
(func (export "run") (param i32 i32) (result anyref)
block $b3 block $b2 block $b1 block $b0
local.get 0
br_table $b0 $b1 $b2 $b3
end $b0
ref.null
local.get 1
call_indirect $t0
return
end $b1
ref.null
local.get 1
call_indirect $t1
return
end $b2
ref.null
local.get 1
call_indirect $t2
return
end $b3
ref.null
ref.null
local.get 1
call_indirect $t3
return
)
)`).exports.run;
for (var i = 0; i < 4; i++) {
for (var j = 0; j < 4; j++) {
if (i == j)
assertEq(run(i, j), null);
else
assertErrorMessage(() => run(i, j), RuntimeError, /indirect call signature mismatch/);
}
}
// JS API:
const wasmFun = wasmEvalText(`(module (func (export "x")))`).exports.x;
var run = wasmEvalText(`(module (func (export "run") (param funcref) (result funcref) (local.get 0)))`).exports.run;
assertEq(run(wasmFun), wasmFun);
assertEq(run(null), null);
assertErrorMessage(() => run(() => {}), TypeError, badWasmFunc);
var importReturnValue;
var importFun = () => importReturnValue;
var run = wasmEvalText(`(module (func (import "" "i") (result funcref)) (func (export "run") (result funcref) (call 0)))`, {'':{i:importFun}}).exports.run;
importReturnValue = wasmFun;
assertEq(run(), wasmFun);
importReturnValue = null;
assertEq(run(), null);
importReturnValue = undefined;
assertErrorMessage(() => run(), TypeError, badWasmFunc);
importReturnValue = () => {};
assertErrorMessage(() => run(), TypeError, badWasmFunc);
var g = new Global({value:'funcref', mutable:true}, wasmFun);
assertEq(g.value, wasmFun);
g.value = null;
assertEq(g.value, null);
Math.sin();
assertErrorMessage(() => g.value = () => {}, TypeError, badWasmFunc);
var g = new Global({value:'funcref', mutable:true}, null);
assertEq(g.value, null);
g.value = wasmFun;
assertEq(g.value, wasmFun);
assertErrorMessage(() => new Global({value:'funcref'}, () => {}), TypeError, badWasmFunc);

View file

@ -5,6 +5,8 @@ const Memory = WebAssembly.Memory;
const LinkError = WebAssembly.LinkError;
const RuntimeError = WebAssembly.RuntimeError;
const badFuncRefError = /can only pass WebAssembly exported functions to funcref/;
var callee = i => `(func $f${i} (result i32) (i32.const ${i}))`;
wasmFailValidateText(`(module (elem (i32.const 0) $f0) ${callee(0)})`, /elem segment requires a table section/);
@ -122,8 +124,8 @@ assertEq(e4.call(2), 13);
var asmjsFun = (function() { "use asm"; function f() {} return f })();
assertEq(isAsmJSFunction(asmjsFun), isAsmJSCompilationAvailable());
assertErrorMessage(() => tbl.set(0, asmjsFun), TypeError, /can only assign WebAssembly exported functions/);
assertErrorMessage(() => tbl.grow(1, asmjsFun), TypeError, /bad initializer to funcref table/);
assertErrorMessage(() => tbl.set(0, asmjsFun), TypeError, badFuncRefError);
assertErrorMessage(() => tbl.grow(1, asmjsFun), TypeError, badFuncRefError);
var m = new Module(wasmTextToBinary(`(module
(type $i2i (func (param i32) (result i32)))

View file

@ -179,7 +179,6 @@ static inline const MDefinition* GetObject(const MDefinition* ins) {
case MDefinition::Opcode::WasmLoadGlobalCell:
case MDefinition::Opcode::WasmStoreGlobalVar:
case MDefinition::Opcode::WasmStoreGlobalCell:
case MDefinition::Opcode::WasmLoadRef:
case MDefinition::Opcode::WasmStoreRef:
case MDefinition::Opcode::ArrayJoin:
case MDefinition::Opcode::ArraySlice:

View file

@ -7421,7 +7421,8 @@ void CodeGenerator::emitWasmCallBase(LWasmCallBase<Defs>* lir) {
break;
case wasm::CalleeDesc::BuiltinInstanceMethod:
masm.wasmCallBuiltinInstanceMethod(desc, mir->instanceArg(),
callee.builtin());
callee.builtin(),
mir->builtinMethodFailureMode());
switchRealm = false;
break;
}
@ -7530,10 +7531,6 @@ void CodeGenerator::visitWasmDerivedPointer(LWasmDerivedPointer* ins) {
masm.addPtr(Imm32(int32_t(ins->offset())), ToRegister(ins->output()));
}
void CodeGenerator::visitWasmLoadRef(LWasmLoadRef* lir) {
masm.loadPtr(Address(ToRegister(lir->ptr()), 0), ToRegister(lir->output()));
}
void CodeGenerator::visitWasmStoreRef(LWasmStoreRef* ins) {
Register tls = ToRegister(ins->tls());
Register valueAddr = ToRegister(ins->valueAddr());
@ -13918,6 +13915,7 @@ void CodeGenerator::emitIonToWasmCallBase(LIonToWasmCallBase<NumDefs>* lir) {
case wasm::ValType::I64:
case wasm::ValType::Ref:
case wasm::ValType::AnyRef:
case wasm::ValType::FuncRef:
// Don't forget to trace GC type arguments in TraceJitExitFrames
// when they're enabled.
MOZ_CRASH("unexpected argument type when calling from ion to wasm");
@ -13976,6 +13974,7 @@ void CodeGenerator::emitIonToWasmCallBase(LIonToWasmCallBase<NumDefs>* lir) {
break;
case wasm::ExprType::Ref:
case wasm::ExprType::AnyRef:
case wasm::ExprType::FuncRef:
case wasm::ExprType::I64:
// Don't forget to trace GC type return value in TraceJitExitFrames
// when they're enabled.
@ -14016,11 +14015,6 @@ void CodeGenerator::visitWasmNullConstant(LWasmNullConstant* lir) {
masm.xorPtr(ToRegister(lir->output()), ToRegister(lir->output()));
}
void CodeGenerator::visitIsNullPointer(LIsNullPointer* lir) {
masm.cmpPtrSet(Assembler::Equal, ToRegister(lir->value()), ImmWord(0),
ToRegister(lir->output()));
}
void CodeGenerator::visitWasmCompareAndSelect(LWasmCompareAndSelect* ins) {
bool cmpIs32bit = ins->compareType() == MCompare::Compare_Int32 ||
ins->compareType() == MCompare::Compare_UInt32;

View file

@ -4350,11 +4350,6 @@ void LIRGenerator::visitWasmDerivedPointer(MWasmDerivedPointer* ins) {
define(new (alloc()) LWasmDerivedPointer(base), ins);
}
void LIRGenerator::visitWasmLoadRef(MWasmLoadRef* ins) {
define(new (alloc()) LWasmLoadRef(useRegisterAtStart(ins->getOperand(0))),
ins);
}
void LIRGenerator::visitWasmStoreRef(MWasmStoreRef* ins) {
LAllocation tls = useRegister(ins->tls());
LAllocation valueAddr = useFixed(ins->valueAddr(), PreBarrierReg);
@ -4722,11 +4717,6 @@ void LIRGenerator::visitWasmNullConstant(MWasmNullConstant* ins) {
define(new (alloc()) LWasmNullConstant(), ins);
}
void LIRGenerator::visitIsNullPointer(MIsNullPointer* ins) {
define(new (alloc()) LIsNullPointer(useRegisterAtStart(ins->getOperand(0))),
ins);
}
void LIRGenerator::visitWasmFloatConstant(MWasmFloatConstant* ins) {
switch (ins->type()) {
case MIRType::Double:

View file

@ -4272,6 +4272,7 @@ IonBuilder::InliningResult IonBuilder::inlineWasmCall(CallInfo& callInfo,
break;
case wasm::ValType::I64:
case wasm::ValType::AnyRef:
case wasm::ValType::FuncRef:
case wasm::ValType::Ref:
MOZ_CRASH("impossible per above check");
case wasm::ValType::NullRef:

View file

@ -5451,8 +5451,9 @@ MWasmCall* MWasmCall::New(TempAllocator& alloc, const wasm::CallSiteDesc& desc,
MWasmCall* MWasmCall::NewBuiltinInstanceMethodCall(
TempAllocator& alloc, const wasm::CallSiteDesc& desc,
const wasm::SymbolicAddress builtin, const ABIArg& instanceArg,
const Args& args, MIRType resultType, uint32_t stackArgAreaSizeUnaligned) {
const wasm::SymbolicAddress builtin, wasm::FailureMode failureMode,
const ABIArg& instanceArg, const Args& args, MIRType resultType,
uint32_t stackArgAreaSizeUnaligned) {
auto callee = wasm::CalleeDesc::builtinInstanceMethod(builtin);
MWasmCall* call = MWasmCall::New(alloc, desc, callee, args, resultType,
stackArgAreaSizeUnaligned, nullptr);
@ -5462,6 +5463,7 @@ MWasmCall* MWasmCall::NewBuiltinInstanceMethodCall(
MOZ_ASSERT(instanceArg != ABIArg());
call->instanceArg_ = instanceArg;
call->builtinMethodFailureMode_ = failureMode;
return call;
}

View file

@ -1570,29 +1570,6 @@ class MWasmNullConstant : public MNullaryInstruction {
ALLOW_CLONE(MWasmNullConstant)
};
class MIsNullPointer : public MUnaryInstruction, public NoTypePolicy::Data {
explicit MIsNullPointer(MDefinition* value)
: MUnaryInstruction(classOpcode, value) {
MOZ_ASSERT(value->type() == MIRType::Pointer);
setResultType(MIRType::Boolean);
setMovable();
}
public:
INSTRUCTION_HEADER(IsNullPointer);
static MIsNullPointer* New(TempAllocator& alloc, MDefinition* value) {
return new (alloc) MIsNullPointer(value);
}
bool congruentTo(const MDefinition* ins) const override {
return congruentIfOperandsEqual(ins);
}
AliasSet getAliasSet() const override { return AliasSet::None(); }
ALLOW_CLONE(MIsNullPointer)
};
// Floating-point value as created by wasm. Just a constant value, used to
// effectively inhibit all the MIR optimizations. This uses the same LIR nodes
// as a MConstant of the same type would.
@ -11798,31 +11775,6 @@ class MWasmDerivedPointer : public MUnaryInstruction,
ALLOW_CLONE(MWasmDerivedPointer)
};
class MWasmLoadRef : public MUnaryInstruction, public NoTypePolicy::Data {
AliasSet::Flag aliasSet_;
explicit MWasmLoadRef(MDefinition* valueAddr, AliasSet::Flag aliasSet,
bool isMovable = true)
: MUnaryInstruction(classOpcode, valueAddr), aliasSet_(aliasSet) {
MOZ_ASSERT(valueAddr->type() == MIRType::Pointer);
setResultType(MIRType::RefOrNull);
if (isMovable) {
setMovable();
}
}
public:
INSTRUCTION_HEADER(WasmLoadRef)
TRIVIAL_NEW_WRAPPERS
bool congruentTo(const MDefinition* ins) const override {
return congruentIfOperandsEqual(ins);
}
AliasSet getAliasSet() const override { return AliasSet::Load(aliasSet_); }
ALLOW_CLONE(MWasmLoadRef)
};
class MWasmStoreRef : public MAryInstruction<3>, public NoTypePolicy::Data {
AliasSet::Flag aliasSet_;
@ -11897,6 +11849,7 @@ class MWasmStackArg : public MUnaryInstruction, public NoTypePolicy::Data {
class MWasmCall final : public MVariadicInstruction, public NoTypePolicy::Data {
wasm::CallSiteDesc desc_;
wasm::CalleeDesc callee_;
wasm::FailureMode builtinMethodFailureMode_;
FixedList<AnyRegister> argRegs_;
uint32_t stackArgAreaSizeUnaligned_;
ABIArg instanceArg_;
@ -11906,6 +11859,7 @@ class MWasmCall final : public MVariadicInstruction, public NoTypePolicy::Data {
: MVariadicInstruction(classOpcode),
desc_(desc),
callee_(callee),
builtinMethodFailureMode_(wasm::FailureMode::Infallible),
stackArgAreaSizeUnaligned_(stackArgAreaSizeUnaligned) {}
public:
@ -11925,8 +11879,9 @@ class MWasmCall final : public MVariadicInstruction, public NoTypePolicy::Data {
static MWasmCall* NewBuiltinInstanceMethodCall(
TempAllocator& alloc, const wasm::CallSiteDesc& desc,
const wasm::SymbolicAddress builtin, const ABIArg& instanceArg,
const Args& args, MIRType resultType, uint32_t stackArgAreaSizeUnaligned);
const wasm::SymbolicAddress builtin, wasm::FailureMode failureMode,
const ABIArg& instanceArg, const Args& args, MIRType resultType,
uint32_t stackArgAreaSizeUnaligned);
size_t numArgs() const { return argRegs_.length(); }
AnyRegister registerForArg(size_t index) const {
@ -11935,6 +11890,10 @@ class MWasmCall final : public MVariadicInstruction, public NoTypePolicy::Data {
}
const wasm::CallSiteDesc& desc() const { return desc_; }
const wasm::CalleeDesc& callee() const { return callee_; }
wasm::FailureMode builtinMethodFailureMode() const {
MOZ_ASSERT(callee_.which() == wasm::CalleeDesc::BuiltinInstanceMethod);
return builtinMethodFailureMode_;
}
uint32_t stackArgAreaSizeUnaligned() const {
return stackArgAreaSizeUnaligned_;
}

View file

@ -3191,7 +3191,7 @@ CodeOffset MacroAssembler::wasmCallImport(const wasm::CallSiteDesc& desc,
CodeOffset MacroAssembler::wasmCallBuiltinInstanceMethod(
const wasm::CallSiteDesc& desc, const ABIArg& instanceArg,
wasm::SymbolicAddress builtin) {
wasm::SymbolicAddress builtin, wasm::FailureMode failureMode) {
MOZ_ASSERT(instanceArg != ABIArg());
if (instanceArg.kind() == ABIArg::GPR) {
@ -3207,7 +3207,31 @@ CodeOffset MacroAssembler::wasmCallBuiltinInstanceMethod(
MOZ_CRASH("Unknown abi passing style for pointer");
}
return call(desc, builtin);
CodeOffset ret = call(desc, builtin);
if (failureMode != wasm::FailureMode::Infallible) {
Label noTrap;
switch (failureMode) {
case wasm::FailureMode::Infallible:
MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE();
case wasm::FailureMode::FailOnNegI32:
branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &noTrap);
break;
case wasm::FailureMode::FailOnNullPtr:
branchTestPtr(Assembler::NonZero, ReturnReg, ReturnReg, &noTrap);
break;
case wasm::FailureMode::FailOnInvalidRef:
branchPtr(Assembler::NotEqual, ReturnReg,
ImmWord(uintptr_t(wasm::AnyRef::invalid().forCompiledCode())),
&noTrap);
break;
}
wasmTrap(wasm::Trap::ThrowReported,
wasm::BytecodeOffset(desc.lineOrBytecode()));
bind(&noTrap);
}
return ret;
}
CodeOffset MacroAssembler::wasmCallIndirect(const wasm::CallSiteDesc& desc,

View file

@ -1927,7 +1927,8 @@ class MacroAssembler : public MacroAssemblerSpecific {
// (TLS & pinned regs are non-volatile registers in the system ABI).
CodeOffset wasmCallBuiltinInstanceMethod(const wasm::CallSiteDesc& desc,
const ABIArg& instanceArg,
wasm::SymbolicAddress builtin);
wasm::SymbolicAddress builtin,
wasm::FailureMode failureMode);
// As enterFakeExitFrame(), but using register conventions appropriate for
// wasm stubs.

View file

@ -6682,17 +6682,6 @@ class LWasmDerivedPointer : public LInstructionHelper<1, 1, 0> {
size_t offset() { return mirRaw()->toWasmDerivedPointer()->offset(); }
};
class LWasmLoadRef : public LInstructionHelper<1, 1, 0> {
public:
LIR_HEADER(WasmLoadRef);
explicit LWasmLoadRef(const LAllocation& ptr)
: LInstructionHelper(classOpcode) {
setOperand(0, ptr);
}
MWasmLoadRef* mir() const { return mirRaw()->toWasmLoadRef(); }
const LAllocation* ptr() { return getOperand(0); }
};
class LWasmStoreRef : public LInstructionHelper<0, 3, 1> {
public:
LIR_HEADER(WasmStoreRef);
@ -6782,17 +6771,6 @@ class LWasmNullConstant : public LInstructionHelper<1, 0, 0> {
explicit LWasmNullConstant() : LInstructionHelper(classOpcode) {}
};
class LIsNullPointer : public LInstructionHelper<1, 1, 0> {
public:
LIR_HEADER(IsNullPointer);
explicit LIsNullPointer(const LAllocation& value)
: LInstructionHelper(classOpcode) {
setOperand(0, value);
}
MIsNullPointer* mir() const { return mirRaw()->toIsNullPointer(); }
const LAllocation* value() { return getOperand(0); }
};
template <size_t Defs>
class LWasmCallBase : public LVariadicInstruction<Defs, 0> {
using Base = LVariadicInstruction<Defs, 0>;

View file

@ -401,7 +401,6 @@ MSG_DEF(JSMSG_WASM_DROPPED_ELEM_SEG, 0, JSEXN_WASMRUNTIMEERROR, "use of droppe
MSG_DEF(JSMSG_WASM_DEREF_NULL, 0, JSEXN_WASMRUNTIMEERROR, "dereferencing null pointer")
MSG_DEF(JSMSG_WASM_BAD_RANGE , 2, JSEXN_RANGEERR, "bad {0} {1}")
MSG_DEF(JSMSG_WASM_BAD_GROW, 1, JSEXN_RANGEERR, "failed to grow {0}")
MSG_DEF(JSMSG_WASM_BAD_TBL_GROW_INIT, 1, JSEXN_TYPEERR, "bad initializer to {0} table")
MSG_DEF(JSMSG_WASM_TABLE_OUT_OF_BOUNDS, 0, JSEXN_RANGEERR, "table index out of bounds")
MSG_DEF(JSMSG_WASM_BAD_UINT32, 2, JSEXN_TYPEERR, "bad {0} {1}")
MSG_DEF(JSMSG_WASM_BAD_BUF_ARG, 0, JSEXN_TYPEERR, "first argument must be an ArrayBuffer or typed array object")
@ -412,7 +411,7 @@ MSG_DEF(JSMSG_WASM_BAD_ELEMENT, 0, JSEXN_TYPEERR, "\"element\" proper
MSG_DEF(JSMSG_WASM_BAD_ELEMENT_GENERALIZED, 0, JSEXN_TYPEERR, "\"element\" property of table descriptor must be \"funcref\" or \"anyref\"")
MSG_DEF(JSMSG_WASM_BAD_IMPORT_ARG, 0, JSEXN_TYPEERR, "second argument must be an object")
MSG_DEF(JSMSG_WASM_BAD_IMPORT_FIELD, 1, JSEXN_TYPEERR, "import object field '{0}' is not an Object")
MSG_DEF(JSMSG_WASM_BAD_TABLE_VALUE, 0, JSEXN_TYPEERR, "can only assign WebAssembly exported functions to Table")
MSG_DEF(JSMSG_WASM_BAD_FUNCREF_VALUE, 0, JSEXN_TYPEERR, "can only pass WebAssembly exported functions to funcref")
MSG_DEF(JSMSG_WASM_BAD_I64_TYPE, 0, JSEXN_TYPEERR, "cannot pass i64 to or from JS")
MSG_DEF(JSMSG_WASM_BAD_GLOBAL_TYPE, 0, JSEXN_TYPEERR, "bad type for a WebAssembly.Global")
MSG_DEF(JSMSG_WASM_NO_TRANSFER, 0, JSEXN_TYPEERR, "cannot transfer WebAssembly/asm.js ArrayBuffer")

View file

@ -2019,7 +2019,7 @@ class MOZ_STACK_CLASS JS_HAZ_ROOTED ModuleValidator
}
env_.asmJSSigToTableIndex[sigIndex] = env_.tables.length();
if (!env_.tables.emplaceBack(TableKind::TypedFunction, Limits(mask + 1))) {
if (!env_.tables.emplaceBack(TableKind::AsmJS, Limits(mask + 1))) {
return false;
}
@ -6562,7 +6562,8 @@ static bool ValidateGlobalVariable(JSContext* cx, const AsmJSGlobal& global,
}
case ValType::Ref:
case ValType::NullRef:
case ValType::AnyRef: {
case ValType::AnyRef:
case ValType::FuncRef: {
MOZ_CRASH("not available in asm.js");
}
}

View file

@ -127,9 +127,11 @@ class AstValType {
}
}
bool isRefType() const {
#ifdef ENABLE_WASM_GC
bool isNarrowType() const {
return code() == ValType::AnyRef || code() == ValType::Ref;
}
#endif
bool isValid() const { return !(which_ == IsValType && !type_.isValid()); }

View file

@ -1056,6 +1056,7 @@ void BaseLocalIter::settle() {
case ValType::F32:
case ValType::F64:
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef:
// TODO/AnyRef-boxing: With boxed immediates and strings, the
// debugger must be made aware that AnyRef != Pointer.
@ -2783,6 +2784,7 @@ class BaseCompiler final : public BaseCompilerInterface {
case ExprType::I64:
needI64(joinRegI64_);
break;
case ExprType::FuncRef:
case ExprType::AnyRef:
case ExprType::NullRef:
case ExprType::Ref:
@ -2800,6 +2802,7 @@ class BaseCompiler final : public BaseCompilerInterface {
case ExprType::I64:
freeI64(joinRegI64_);
break;
case ExprType::FuncRef:
case ExprType::AnyRef:
case ExprType::NullRef:
case ExprType::Ref:
@ -2825,6 +2828,7 @@ class BaseCompiler final : public BaseCompilerInterface {
break;
case ExprType::Ref:
case ExprType::NullRef:
case ExprType::FuncRef:
case ExprType::AnyRef:
needRef(joinRegPtr_);
break;
@ -2849,6 +2853,7 @@ class BaseCompiler final : public BaseCompilerInterface {
break;
case ExprType::Ref:
case ExprType::NullRef:
case ExprType::FuncRef:
case ExprType::AnyRef:
freeRef(joinRegPtr_);
break;
@ -3778,6 +3783,7 @@ class BaseCompiler final : public BaseCompilerInterface {
}
case ExprType::Ref:
case ExprType::NullRef:
case ExprType::FuncRef:
case ExprType::AnyRef: {
DebugOnly<Stk::Kind> k(stk_.back().kind());
MOZ_ASSERT(k == Stk::RegisterRef || k == Stk::ConstRef ||
@ -3816,6 +3822,7 @@ class BaseCompiler final : public BaseCompilerInterface {
return Some(AnyReg(joinRegF64_));
case ExprType::Ref:
case ExprType::NullRef:
case ExprType::FuncRef:
case ExprType::AnyRef:
MOZ_ASSERT(isAvailableRef(joinRegPtr_));
needRef(joinRegPtr_);
@ -4239,6 +4246,7 @@ class BaseCompiler final : public BaseCompilerInterface {
masm.storeFloat32(RegF32(ReturnFloat32Reg), resultsAddress);
break;
case ExprType::Ref:
case ExprType::FuncRef:
case ExprType::AnyRef:
masm.storePtr(RegPtr(ReturnReg), resultsAddress);
break;
@ -4269,6 +4277,7 @@ class BaseCompiler final : public BaseCompilerInterface {
masm.loadFloat32(resultsAddress, RegF32(ReturnFloat32Reg));
break;
case ExprType::Ref:
case ExprType::FuncRef:
case ExprType::AnyRef:
masm.loadPtr(resultsAddress, RegPtr(ReturnReg));
break;
@ -4581,6 +4590,7 @@ class BaseCompiler final : public BaseCompilerInterface {
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
ABIArg argLoc = call->abi.next(MIRType::RefOrNull);
if (argLoc.kind() == ABIArg::Stack) {
@ -4638,14 +4648,15 @@ class BaseCompiler final : public BaseCompilerInterface {
return callSymbolic(builtin, call);
}
CodeOffset builtinInstanceMethodCall(SymbolicAddress builtin,
CodeOffset builtinInstanceMethodCall(const SymbolicAddressSignature& builtin,
const ABIArg& instanceArg,
const FunctionCall& call) {
// Builtin method calls assume the TLS register has been set.
masm.loadWasmTlsRegFromFrame();
CallSiteDesc desc(call.lineOrBytecode, CallSiteDesc::Symbolic);
return masm.wasmCallBuiltinInstanceMethod(desc, instanceArg, builtin);
return masm.wasmCallBuiltinInstanceMethod(
desc, instanceArg, builtin.identity, builtin.failureMode);
}
//////////////////////////////////////////////////////////////////////
@ -8553,6 +8564,7 @@ void BaseCompiler::doReturn(ExprType type, bool popStack) {
}
case ExprType::Ref:
case ExprType::NullRef:
case ExprType::FuncRef:
case ExprType::AnyRef: {
RegPtr rv = popRef(RegPtr(ReturnReg));
returnCleanup(popStack);
@ -8995,6 +9007,7 @@ bool BaseCompiler::emitGetLocal() {
pushLocalF32(slot);
break;
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef:
pushLocalRef(slot);
break;
@ -9059,6 +9072,7 @@ bool BaseCompiler::emitSetOrTeeLocal(uint32_t slot) {
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
RegPtr rv = popRef();
syncLocal(slot);
@ -9124,12 +9138,12 @@ bool BaseCompiler::emitGetGlobal() {
pushF64(value.f64());
break;
case ValType::Ref:
case ValType::NullRef:
pushRef(intptr_t(value.ref()));
break;
case ValType::FuncRef:
case ValType::AnyRef:
pushRef(intptr_t(value.anyref().forCompiledCode()));
pushRef(intptr_t(value.ref().forCompiledCode()));
break;
case ValType::NullRef:
MOZ_CRASH("NullRef not expressible");
default:
MOZ_CRASH("Global constant type");
}
@ -9166,6 +9180,7 @@ bool BaseCompiler::emitGetGlobal() {
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
RegPtr rv = needRef();
ScratchI32 tmp(*this);
@ -9225,6 +9240,7 @@ bool BaseCompiler::emitSetGlobal() {
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
RegPtr valueAddr(PreBarrierReg);
needRef(valueAddr);
@ -9632,6 +9648,7 @@ bool BaseCompiler::emitSelect() {
}
case ValType::Ref:
case ValType::NullRef:
case ValType::FuncRef:
case ValType::AnyRef: {
RegPtr r, rs;
pop2xRef(&r, &rs);
@ -9785,7 +9802,7 @@ bool BaseCompiler::emitInstanceCall(uint32_t lineOrBytecode,
passArg(t, peek(numNonInstanceArgs - i), &baselineCall);
}
CodeOffset raOffset =
builtinInstanceMethodCall(builtin.identity, instanceArg, baselineCall);
builtinInstanceMethodCall(builtin, instanceArg, baselineCall);
if (!createStackMap("emitInstanceCall", raOffset)) {
return false;
}
@ -10147,7 +10164,6 @@ bool BaseCompiler::emitWait(ValType type, uint32_t byteSize) {
return true;
}
// Returns -1 on trap, otherwise nonnegative result.
switch (type.code()) {
case ValType::I32:
if (!emitInstanceCall(lineOrBytecode, SASigWaitI32)) {
@ -10163,11 +10179,6 @@ bool BaseCompiler::emitWait(ValType type, uint32_t byteSize) {
MOZ_CRASH();
}
Label ok;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
return true;
}
@ -10184,17 +10195,7 @@ bool BaseCompiler::emitWake() {
return true;
}
// Returns -1 on trap, otherwise nonnegative result.
if (!emitInstanceCall(lineOrBytecode, SASigWake)) {
return false;
}
Label ok;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
return true;
return emitInstanceCall(lineOrBytecode, SASigWake);
}
#ifdef ENABLE_WASM_BULKMEM_OPS
@ -10213,7 +10214,6 @@ bool BaseCompiler::emitMemOrTableCopy(bool isMem) {
return true;
}
// Returns -1 on trap, otherwise 0.
if (isMem) {
MOZ_ASSERT(srcMemOrTableIndex == 0);
MOZ_ASSERT(dstMemOrTableIndex == 0);
@ -10230,11 +10230,6 @@ bool BaseCompiler::emitMemOrTableCopy(bool isMem) {
}
}
Label ok;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
return true;
}
@ -10251,21 +10246,11 @@ bool BaseCompiler::emitDataOrElemDrop(bool isData) {
}
// Despite the cast to int32_t, the callee regards the value as unsigned.
//
// Returns -1 on trap, otherwise 0.
pushI32(int32_t(segIndex));
const SymbolicAddressSignature& callee =
isData ? SASigDataDrop : SASigElemDrop;
if (!emitInstanceCall(lineOrBytecode, callee, /*pushReturnedValue=*/false)) {
return false;
}
Label ok;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
return true;
return emitInstanceCall(lineOrBytecode,
isData ? SASigDataDrop : SASigElemDrop,
/*pushReturnedValue=*/false);
}
bool BaseCompiler::emitMemFill() {
@ -10280,18 +10265,8 @@ bool BaseCompiler::emitMemFill() {
return true;
}
// Returns -1 on trap, otherwise 0.
if (!emitInstanceCall(lineOrBytecode, SASigMemFill,
/*pushReturnedValue=*/false)) {
return false;
}
Label ok;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
return true;
return emitInstanceCall(lineOrBytecode, SASigMemFill,
/*pushReturnedValue=*/false);
}
bool BaseCompiler::emitMemOrTableInit(bool isMem) {
@ -10309,7 +10284,6 @@ bool BaseCompiler::emitMemOrTableInit(bool isMem) {
return true;
}
// Returns -1 on trap, otherwise 0.
pushI32(int32_t(segIndex));
if (isMem) {
if (!emitInstanceCall(lineOrBytecode, SASigMemInit,
@ -10324,11 +10298,6 @@ bool BaseCompiler::emitMemOrTableInit(bool isMem) {
}
}
Label ok;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
return true;
}
#endif
@ -10348,20 +10317,9 @@ bool BaseCompiler::emitTableFill() {
}
// fill(start:u32, val:ref, len:u32, table:u32) -> u32
//
// Returns -1 on trap, otherwise 0.
pushI32(tableIndex);
if (!emitInstanceCall(lineOrBytecode, SASigTableFill,
/*pushReturnedValue=*/false)) {
return false;
}
Label ok;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
return true;
return emitInstanceCall(lineOrBytecode, SASigTableFill,
/*pushReturnedValue=*/false);
}
MOZ_MUST_USE
@ -10375,21 +10333,12 @@ bool BaseCompiler::emitTableGet() {
if (deadCode_) {
return true;
}
// get(index:u32, table:u32) -> void*
//
// Returns nullptr for error, otherwise a pointer to a nonmoveable memory
// location that holds the anyref value.
// get(index:u32, table:u32) -> uintptr_t(AnyRef)
pushI32(tableIndex);
if (!emitInstanceCall(lineOrBytecode, SASigTableGet,
/*pushReturnedValue=*/false)) {
return false;
}
Label noTrap;
masm.branchTestPtr(Assembler::NonZero, ReturnReg, ReturnReg, &noTrap);
trap(Trap::ThrowReported);
masm.bind(&noTrap);
masm.loadPtr(Address(ReturnReg, 0), ReturnReg);
// Push the resulting anyref back on the eval stack. NOTE: needRef() must
// not kill the value in the register.
@ -10413,8 +10362,6 @@ bool BaseCompiler::emitTableGrow() {
return true;
}
// grow(initValue:anyref, delta:u32, table:u32) -> u32
//
// infallible.
pushI32(tableIndex);
return emitInstanceCall(lineOrBytecode, SASigTableGrow);
}
@ -10431,18 +10378,9 @@ bool BaseCompiler::emitTableSet() {
return true;
}
// set(index:u32, value:ref, table:u32) -> i32
//
// Returns -1 on range error, otherwise 0 (which is then ignored).
pushI32(tableIndex);
if (!emitInstanceCall(lineOrBytecode, SASigTableSet,
/*pushReturnedValue=*/false)) {
return false;
}
Label noTrap;
masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &noTrap);
trap(Trap::ThrowReported);
masm.bind(&noTrap);
return true;
return emitInstanceCall(lineOrBytecode, SASigTableSet,
/*pushReturnedValue=*/false);
}
MOZ_MUST_USE
@ -10456,8 +10394,6 @@ bool BaseCompiler::emitTableSize() {
return true;
}
// size(table:u32) -> u32
//
// infallible.
pushI32(tableIndex);
return emitInstanceCall(lineOrBytecode, SASigTableSize);
}
@ -10487,13 +10423,6 @@ bool BaseCompiler::emitStructNew() {
return false;
}
// Null pointer check.
Label ok;
masm.branchTestPtr(Assembler::NonZero, ReturnReg, ReturnReg, &ok);
trap(Trap::ThrowReported);
masm.bind(&ok);
// As many arguments as there are fields.
MOZ_ASSERT(args.length() == structType.fields_.length());
@ -10545,6 +10474,7 @@ bool BaseCompiler::emitStructNew() {
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
RegPtr value = popRef();
masm.storePtr(value, Address(rdata, offs));
@ -10662,6 +10592,7 @@ bool BaseCompiler::emitStructGet() {
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
RegPtr r = needRef();
masm.loadPtr(Address(rp, offs), r);
@ -10723,6 +10654,7 @@ bool BaseCompiler::emitStructSet() {
rd = popF64();
break;
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef:
rr = popRef();
break;
@ -10766,6 +10698,7 @@ bool BaseCompiler::emitStructSet() {
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
masm.computeEffectiveAddress(Address(rp, offs), valueAddr);
// emitBarrieredStore consumes valueAddr
@ -10802,6 +10735,10 @@ bool BaseCompiler::emitStructNarrow() {
return true;
}
// Currently not supported by struct.narrow validation.
MOZ_ASSERT(inputType != ValType::FuncRef);
MOZ_ASSERT(outputType != ValType::FuncRef);
// AnyRef -> AnyRef is a no-op, just leave the value on the stack.
if (inputType == ValType::AnyRef && outputType == ValType::AnyRef) {
@ -10815,8 +10752,6 @@ bool BaseCompiler::emitStructNarrow() {
bool mustUnboxAnyref = inputType == ValType::AnyRef;
// Dynamic downcast (ref T) -> (ref U), leaves rp or null
//
// Infallible.
const StructType& outputStruct =
env_.types[outputType.refTypeIndex()].structType();
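
Most of the hunks in this file only add a ValType::FuncRef case beside the existing AnyRef one. That is sound because, at the level the baseline compiler cares about, every reference type is one possibly-null pointer moved through RegPtr; only validation distinguishes funcref from anyref. A standalone illustration of that fallthrough pattern, using stand-in types rather than the real ValType and stack machinery:

#include <cassert>
#include <cstdint>

// Stand-in value-type enum and stack; not the real compiler types.
enum class VT : uint8_t { I32, I64, F32, F64, Ref, FuncRef, AnyRef, NullRef };

struct SketchStack {
  void pushI32(int32_t) {}
  void pushI64(int64_t) {}
  void pushF32(float) {}
  void pushF64(double) {}
  void pushRef(void*) {}  // one pointer-sized slot for every reference type
};

void pushDefaultValue(SketchStack& stk, VT type) {
  switch (type) {
    case VT::I32: stk.pushI32(0); break;
    case VT::I64: stk.pushI64(0); break;
    case VT::F32: stk.pushF32(0.0f); break;
    case VT::F64: stk.pushF64(0.0); break;
    case VT::Ref:
    case VT::FuncRef:
    case VT::AnyRef:
      stk.pushRef(nullptr);  // funcref and anyref share the pointer path
      break;
    case VT::NullRef:
      assert(false && "NullRef not expressible");
      break;
  }
}
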

Просмотреть файл

@ -56,94 +56,138 @@ static const unsigned BUILTIN_THUNK_LIFO_SIZE = 64 * 1024;
#define _RoN MIRType::RefOrNull
#define _VOID MIRType::None
#define _END MIRType::None
#define _Infallible FailureMode::Infallible
#define _FailOnNegI32 FailureMode::FailOnNegI32
#define _FailOnNullPtr FailureMode::FailOnNullPtr
#define _FailOnInvalidRef FailureMode::FailOnInvalidRef
namespace js {
namespace wasm {
const SymbolicAddressSignature SASigSinD = {
SymbolicAddress::SinD, _F64, 1, {_F64, _END}};
SymbolicAddress::SinD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigCosD = {
SymbolicAddress::CosD, _F64, 1, {_F64, _END}};
SymbolicAddress::CosD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigTanD = {
SymbolicAddress::TanD, _F64, 1, {_F64, _END}};
SymbolicAddress::TanD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigASinD = {
SymbolicAddress::ASinD, _F64, 1, {_F64, _END}};
SymbolicAddress::ASinD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigACosD = {
SymbolicAddress::ACosD, _F64, 1, {_F64, _END}};
SymbolicAddress::ACosD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigATanD = {
SymbolicAddress::ATanD, _F64, 1, {_F64, _END}};
SymbolicAddress::ATanD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigCeilD = {
SymbolicAddress::CeilD, _F64, 1, {_F64, _END}};
SymbolicAddress::CeilD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigCeilF = {
SymbolicAddress::CeilF, _F32, 1, {_F32, _END}};
SymbolicAddress::CeilF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigFloorD = {
SymbolicAddress::FloorD, _F64, 1, {_F64, _END}};
SymbolicAddress::FloorD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigFloorF = {
SymbolicAddress::FloorF, _F32, 1, {_F32, _END}};
SymbolicAddress::FloorF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigTruncD = {
SymbolicAddress::TruncD, _F64, 1, {_F64, _END}};
SymbolicAddress::TruncD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigTruncF = {
SymbolicAddress::TruncF, _F32, 1, {_F32, _END}};
SymbolicAddress::TruncF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigNearbyIntD = {
SymbolicAddress::NearbyIntD, _F64, 1, {_F64, _END}};
SymbolicAddress::NearbyIntD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigNearbyIntF = {
SymbolicAddress::NearbyIntF, _F32, 1, {_F32, _END}};
SymbolicAddress::NearbyIntF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigExpD = {
SymbolicAddress::ExpD, _F64, 1, {_F64, _END}};
SymbolicAddress::ExpD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigLogD = {
SymbolicAddress::LogD, _F64, 1, {_F64, _END}};
SymbolicAddress::LogD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigPowD = {
SymbolicAddress::PowD, _F64, 2, {_F64, _F64, _END}};
SymbolicAddress::PowD, _F64, _Infallible, 2, {_F64, _F64, _END}};
const SymbolicAddressSignature SASigATan2D = {
SymbolicAddress::ATan2D, _F64, 2, {_F64, _F64, _END}};
SymbolicAddress::ATan2D, _F64, _Infallible, 2, {_F64, _F64, _END}};
const SymbolicAddressSignature SASigMemoryGrow = {
SymbolicAddress::MemoryGrow, _I32, 2, {_PTR, _I32, _END}};
SymbolicAddress::MemoryGrow, _I32, _Infallible, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigMemorySize = {
SymbolicAddress::MemorySize, _I32, 1, {_PTR, _END}};
const SymbolicAddressSignature SASigWaitI32 = {
SymbolicAddress::WaitI32, _I32, 4, {_PTR, _I32, _I32, _I64, _END}};
const SymbolicAddressSignature SASigWaitI64 = {
SymbolicAddress::WaitI64, _I32, 4, {_PTR, _I32, _I64, _I64, _END}};
SymbolicAddress::MemorySize, _I32, _Infallible, 1, {_PTR, _END}};
const SymbolicAddressSignature SASigWaitI32 = {SymbolicAddress::WaitI32,
_I32,
_FailOnNegI32,
4,
{_PTR, _I32, _I32, _I64, _END}};
const SymbolicAddressSignature SASigWaitI64 = {SymbolicAddress::WaitI64,
_I32,
_FailOnNegI32,
4,
{_PTR, _I32, _I64, _I64, _END}};
const SymbolicAddressSignature SASigWake = {
SymbolicAddress::Wake, _I32, 3, {_PTR, _I32, _I32, _END}};
const SymbolicAddressSignature SASigMemCopy = {
SymbolicAddress::MemCopy, _I32, 4, {_PTR, _I32, _I32, _I32, _END}};
SymbolicAddress::Wake, _I32, _FailOnNegI32, 3, {_PTR, _I32, _I32, _END}};
const SymbolicAddressSignature SASigMemCopy = {SymbolicAddress::MemCopy,
_VOID,
_FailOnNegI32,
4,
{_PTR, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigDataDrop = {
SymbolicAddress::DataDrop, _I32, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigMemFill = {
SymbolicAddress::MemFill, _I32, 4, {_PTR, _I32, _I32, _I32, _END}};
SymbolicAddress::DataDrop, _VOID, _FailOnNegI32, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigMemFill = {SymbolicAddress::MemFill,
_VOID,
_FailOnNegI32,
4,
{_PTR, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigMemInit = {
SymbolicAddress::MemInit, _I32, 5, {_PTR, _I32, _I32, _I32, _I32, _END}};
SymbolicAddress::MemInit,
_VOID,
_FailOnNegI32,
5,
{_PTR, _I32, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableCopy = {
SymbolicAddress::TableCopy,
_I32,
_VOID,
_FailOnNegI32,
6,
{_PTR, _I32, _I32, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigElemDrop = {
SymbolicAddress::ElemDrop, _I32, 2, {_PTR, _I32, _END}};
SymbolicAddress::ElemDrop, _VOID, _FailOnNegI32, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigTableFill = {
SymbolicAddress::TableFill, _I32, 5, {_PTR, _I32, _RoN, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableGet = {
SymbolicAddress::TableGet, _PTR, 3, {_PTR, _I32, _I32, _END}};
SymbolicAddress::TableFill,
_VOID,
_FailOnNegI32,
5,
{_PTR, _I32, _RoN, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableGet = {SymbolicAddress::TableGet,
_RoN,
_FailOnInvalidRef,
3,
{_PTR, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableGrow = {
SymbolicAddress::TableGrow, _I32, 4, {_PTR, _RoN, _I32, _I32, _END}};
SymbolicAddress::TableGrow,
_I32,
_Infallible,
4,
{_PTR, _RoN, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableInit = {
SymbolicAddress::TableInit,
_I32,
_VOID,
_FailOnNegI32,
6,
{_PTR, _I32, _I32, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableSet = {
SymbolicAddress::TableSet, _I32, 4, {_PTR, _I32, _RoN, _I32, _END}};
const SymbolicAddressSignature SASigTableSet = {SymbolicAddress::TableSet,
_VOID,
_FailOnNegI32,
4,
{_PTR, _I32, _RoN, _I32, _END}};
const SymbolicAddressSignature SASigTableSize = {
SymbolicAddress::TableSize, _I32, 2, {_PTR, _I32, _END}};
SymbolicAddress::TableSize, _I32, _Infallible, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigPostBarrier = {
SymbolicAddress::PostBarrier, _VOID, 2, {_PTR, _PTR, _END}};
SymbolicAddress::PostBarrier, _VOID, _Infallible, 2, {_PTR, _PTR, _END}};
const SymbolicAddressSignature SASigPostBarrierFiltering = {
SymbolicAddress::PostBarrierFiltering, _VOID, 2, {_PTR, _PTR, _END}};
SymbolicAddress::PostBarrierFiltering,
_VOID,
_Infallible,
2,
{_PTR, _PTR, _END}};
const SymbolicAddressSignature SASigStructNew = {
SymbolicAddress::StructNew, _RoN, 2, {_PTR, _I32, _END}};
SymbolicAddress::StructNew, _RoN, _FailOnNullPtr, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigStructNarrow = {
SymbolicAddress::StructNarrow, _RoN, 4, {_PTR, _I32, _I32, _RoN, _END}};
SymbolicAddress::StructNarrow,
_RoN,
_Infallible,
4,
{_PTR, _I32, _I32, _RoN, _END}};
} // namespace wasm
} // namespace js
@ -156,6 +200,9 @@ const SymbolicAddressSignature SASigStructNarrow = {
#undef _RoN
#undef _VOID
#undef _END
#undef _Infallible
#undef _FailOnNegI32
#undef _FailOnNullPtr
#undef _FailOnInvalidRef
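
The new FailureMode column is what lets the compilers drop their hand-written "-1 means trap" and "null means trap" branches: the post-call check is now derived from the callee's signature. Builtins whose int32_t result only ever encoded success or failure (MemCopy, MemFill, DataDrop, ElemDrop, TableCopy, TableInit, TableFill, TableSet) now declare _VOID, since wasm code never consumes that value even though the C++ implementations still return int32_t. A minimal standalone sketch of the convention; the helper name and the all-ones sentinel are assumptions for illustration (the real sentinel is whatever AnyRef::invalid().forCompiledCode() produces, and the real check is emitted as machine code by the call helpers):

#include <cstdint>

enum class FailureMode { Infallible, FailOnNegI32, FailOnNullPtr, FailOnInvalidRef };

constexpr uintptr_t kAssumedInvalidRefBits = ~uintptr_t(0);  // assumed sentinel

// True when the builtin's raw return value means "an exception has already
// been reported; branch to the throw stub".
inline bool builtinCallFailed(FailureMode mode, int32_t i32Result, void* ptrResult) {
  switch (mode) {
    case FailureMode::Infallible:
      return false;
    case FailureMode::FailOnNegI32:
      return i32Result < 0;
    case FailureMode::FailOnNullPtr:
      return ptrResult == nullptr;
    case FailureMode::FailOnInvalidRef:
      return reinterpret_cast<uintptr_t>(ptrResult) == kAssumedInvalidRefBits;
  }
  return false;  // unreachable
}
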
// ============================================================================
// WebAssembly builtin C++ functions called from wasm code to implement internal
@ -619,6 +666,9 @@ void* wasm::AddressOf(SymbolicAddress imm, ABIFunctionType* abiType) {
case SymbolicAddress::CallImport_F64:
*abiType = Args_General4;
return FuncCast(Instance::callImport_f64, *abiType);
case SymbolicAddress::CallImport_FuncRef:
*abiType = Args_General4;
return FuncCast(Instance::callImport_funcref, *abiType);
case SymbolicAddress::CallImport_AnyRef:
*abiType = Args_General4;
return FuncCast(Instance::callImport_anyref, *abiType);
@ -837,6 +887,7 @@ bool wasm::NeedsBuiltinThunk(SymbolicAddress sym) {
case SymbolicAddress::CallImport_I32:
case SymbolicAddress::CallImport_I64:
case SymbolicAddress::CallImport_F64:
case SymbolicAddress::CallImport_FuncRef:
case SymbolicAddress::CallImport_AnyRef:
case SymbolicAddress::CoerceInPlace_ToInt32: // GenerateImportJitExit
case SymbolicAddress::CoerceInPlace_ToNumber:

Просмотреть файл

@ -49,7 +49,7 @@ enum class TypeCode {
F64 = 0x7c, // SLEB128(-0x04)
// A function pointer with any signature
AnyFunc = 0x70, // SLEB128(-0x10)
FuncRef = 0x70, // SLEB128(-0x10)
// A reference to any type.
AnyRef = 0x6f,

Просмотреть файл

@ -436,9 +436,9 @@ size_t global_tlsOffset(const GlobalDesc* global) {
// TableDesc
size_t table_tlsOffset(const TableDesc* table) {
MOZ_RELEASE_ASSERT(table->kind == TableKind::AnyFunction ||
table->kind == TableKind::TypedFunction,
"cranelift doesn't support AnyRef tables yet.");
MOZ_RELEASE_ASSERT(
table->kind == TableKind::FuncRef || table->kind == TableKind::AsmJS,
"cranelift doesn't support AnyRef tables yet.");
return globalToTlsOffset(table->globalDataOffset);
}

Просмотреть файл

@ -1261,6 +1261,7 @@ static const char* ThunkedNativeToDescription(SymbolicAddress func) {
case SymbolicAddress::CallImport_I32:
case SymbolicAddress::CallImport_I64:
case SymbolicAddress::CallImport_F64:
case SymbolicAddress::CallImport_FuncRef:
case SymbolicAddress::CallImport_AnyRef:
case SymbolicAddress::CoerceInPlace_ToInt32:
case SymbolicAddress::CoerceInPlace_ToNumber:

Просмотреть файл

@ -367,10 +367,10 @@ bool ModuleGenerator::init(Metadata* maybeAsmJSMetadata) {
}
for (const ElemSegment* seg : env_->elemSegments) {
TableKind kind = !seg->active() ? TableKind::AnyFunction
TableKind kind = !seg->active() ? TableKind::FuncRef
: env_->tables[seg->tableIndex].kind;
switch (kind) {
case TableKind::AnyFunction:
case TableKind::FuncRef:
for (uint32_t funcIndex : seg->elemFuncIndices) {
if (funcIndex == NullFuncIndex) {
continue;
@ -378,7 +378,7 @@ bool ModuleGenerator::init(Metadata* maybeAsmJSMetadata) {
addOrMerge(ExportedFunc(funcIndex, false));
}
break;
case TableKind::TypedFunction:
case TableKind::AsmJS:
// asm.js functions are not exported.
break;
case TableKind::AnyRef:

Просмотреть файл

@ -131,6 +131,7 @@ bool Instance::callImport(JSContext* cx, uint32_t funcImportIndex,
case ValType::F64:
args[i].set(JS::CanonicalizedDoubleValue(*(double*)&argv[i]));
break;
case ValType::FuncRef:
case ValType::AnyRef: {
args[i].set(UnboxAnyRef(AnyRef::fromCompiledCode(*(void**)&argv[i])));
break;
@ -219,7 +220,7 @@ bool Instance::callImport(JSContext* cx, uint32_t funcImportIndex,
type = TypeSet::DoubleType();
break;
case ValType::Ref:
MOZ_CRASH("case guarded above");
case ValType::FuncRef:
case ValType::AnyRef:
MOZ_CRASH("case guarded above");
case ValType::I64:
@ -305,12 +306,32 @@ Instance::callImport_anyref(Instance* instance, int32_t funcImportIndex,
if (!BoxAnyRef(cx, rval, &result)) {
return false;
}
static_assert(sizeof(argv[0]) >= sizeof(void*), "fits");
*(void**)argv = result.get().forCompiledCode();
return true;
}
/* static */ uint32_t /* infallible */
Instance::memoryGrow_i32(Instance* instance, uint32_t delta) {
/* static */ int32_t /* 0 to signal trap; 1 to signal OK */
Instance::callImport_funcref(Instance* instance, int32_t funcImportIndex,
int32_t argc, uint64_t* argv) {
JSContext* cx = TlsContext.get();
RootedValue rval(cx);
if (!instance->callImport(cx, funcImportIndex, argc, argv, &rval)) {
return false;
}
RootedFunction fun(cx);
if (!CheckFuncRefValue(cx, rval, &fun)) {
return false;
}
*(void**)argv = fun;
return true;
}
/* static */ uint32_t Instance::memoryGrow_i32(Instance* instance,
uint32_t delta) {
MOZ_ASSERT(SASigMemoryGrow.failureMode == FailureMode::Infallible);
MOZ_ASSERT(!instance->isAsmJS());
JSContext* cx = TlsContext.get();
@ -325,8 +346,9 @@ Instance::memoryGrow_i32(Instance* instance, uint32_t delta) {
return ret;
}
/* static */ uint32_t /* infallible */
Instance::memorySize_i32(Instance* instance) {
/* static */ uint32_t Instance::memorySize_i32(Instance* instance) {
MOZ_ASSERT(SASigMemorySize.failureMode == FailureMode::Infallible);
// This invariant must hold when running Wasm code. Assert it here so we can
// write tests for cross-realm calls.
MOZ_ASSERT(TlsContext.get()->realm() == instance->realm());
@ -374,20 +396,22 @@ static int32_t PerformWait(Instance* instance, uint32_t byteOffset, T value,
}
}
/* static */ int32_t /* -1 to signal trap; nonnegative result for ok */
Instance::wait_i32(Instance* instance, uint32_t byteOffset, int32_t value,
int64_t timeout_ns) {
/* static */ int32_t Instance::wait_i32(Instance* instance, uint32_t byteOffset,
int32_t value, int64_t timeout_ns) {
MOZ_ASSERT(SASigWaitI32.failureMode == FailureMode::FailOnNegI32);
return PerformWait<int32_t>(instance, byteOffset, value, timeout_ns);
}
/* static */ int32_t /* -1 to signal trap; nonnegative result for ok */
Instance::wait_i64(Instance* instance, uint32_t byteOffset, int64_t value,
int64_t timeout_ns) {
/* static */ int32_t Instance::wait_i64(Instance* instance, uint32_t byteOffset,
int64_t value, int64_t timeout_ns) {
MOZ_ASSERT(SASigWaitI64.failureMode == FailureMode::FailOnNegI32);
return PerformWait<int64_t>(instance, byteOffset, value, timeout_ns);
}
/* static */ int32_t /* -1 to signal trap; nonnegative for ok */
Instance::wake(Instance* instance, uint32_t byteOffset, int32_t count) {
/* static */ int32_t Instance::wake(Instance* instance, uint32_t byteOffset,
int32_t count) {
MOZ_ASSERT(SASigWake.failureMode == FailureMode::FailOnNegI32);
JSContext* cx = TlsContext.get();
// The alignment guard is not in the wasm spec as of 2017-11-02, but is
@ -418,9 +442,11 @@ Instance::wake(Instance* instance, uint32_t byteOffset, int32_t count) {
return int32_t(woken);
}
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::memCopy(Instance* instance, uint32_t dstByteOffset,
uint32_t srcByteOffset, uint32_t len) {
/* static */ int32_t Instance::memCopy(Instance* instance,
uint32_t dstByteOffset,
uint32_t srcByteOffset, uint32_t len) {
MOZ_ASSERT(SASigMemCopy.failureMode == FailureMode::FailOnNegI32);
WasmMemoryObject* mem = instance->memory();
uint32_t memLen = mem->volatileMemoryLength();
@ -486,8 +512,9 @@ Instance::memCopy(Instance* instance, uint32_t dstByteOffset,
return -1;
}
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::dataDrop(Instance* instance, uint32_t segIndex) {
/* static */ int32_t Instance::dataDrop(Instance* instance, uint32_t segIndex) {
MOZ_ASSERT(SASigDataDrop.failureMode == FailureMode::FailOnNegI32);
MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveDataSegments_.length(),
"ensured by validation");
@ -505,9 +532,10 @@ Instance::dataDrop(Instance* instance, uint32_t segIndex) {
return 0;
}
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::memFill(Instance* instance, uint32_t byteOffset, uint32_t value,
uint32_t len) {
/* static */ int32_t Instance::memFill(Instance* instance, uint32_t byteOffset,
uint32_t value, uint32_t len) {
MOZ_ASSERT(SASigMemFill.failureMode == FailureMode::FailOnNegI32);
WasmMemoryObject* mem = instance->memory();
uint32_t memLen = mem->volatileMemoryLength();
@ -558,9 +586,11 @@ Instance::memFill(Instance* instance, uint32_t byteOffset, uint32_t value,
return -1;
}
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::memInit(Instance* instance, uint32_t dstOffset, uint32_t srcOffset,
uint32_t len, uint32_t segIndex) {
/* static */ int32_t Instance::memInit(Instance* instance, uint32_t dstOffset,
uint32_t srcOffset, uint32_t len,
uint32_t segIndex) {
MOZ_ASSERT(SASigMemInit.failureMode == FailureMode::FailOnNegI32);
MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveDataSegments_.length(),
"ensured by validation");
@ -636,10 +666,12 @@ Instance::memInit(Instance* instance, uint32_t dstOffset, uint32_t srcOffset,
return -1;
}
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::tableCopy(Instance* instance, uint32_t dstOffset, uint32_t srcOffset,
uint32_t len, uint32_t dstTableIndex,
uint32_t srcTableIndex) {
/* static */ int32_t Instance::tableCopy(Instance* instance, uint32_t dstOffset,
uint32_t srcOffset, uint32_t len,
uint32_t dstTableIndex,
uint32_t srcTableIndex) {
MOZ_ASSERT(SASigTableCopy.failureMode == FailureMode::FailOnNegI32);
const SharedTable& srcTable = instance->tables()[srcTableIndex];
uint32_t srcTableLen = srcTable->length();
@ -713,8 +745,9 @@ Instance::tableCopy(Instance* instance, uint32_t dstOffset, uint32_t srcOffset,
return -1;
}
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::elemDrop(Instance* instance, uint32_t segIndex) {
/* static */ int32_t Instance::elemDrop(Instance* instance, uint32_t segIndex) {
MOZ_ASSERT(SASigElemDrop.failureMode == FailureMode::FailOnNegI32);
MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveElemSegments_.length(),
"ensured by validation");
@ -771,20 +804,23 @@ void Instance::initElems(uint32_t tableIndex, const ElemSegment& seg,
calleeInstanceObj->getExportedFunctionCodeRange(fun, calleeTier);
void* code = calleeInstance.codeBase(calleeTier) +
calleeCodeRange.funcTableEntry();
table.setAnyFunc(dstOffset + i, code, &calleeInstance);
table.setFuncRef(dstOffset + i, code, &calleeInstance);
continue;
}
}
void* code = codeBaseTier +
codeRanges[funcToCodeRange[funcIndex]].funcTableEntry();
table.setAnyFunc(dstOffset + i, code, this);
table.setFuncRef(dstOffset + i, code, this);
}
}
}
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::tableInit(Instance* instance, uint32_t dstOffset, uint32_t srcOffset,
uint32_t len, uint32_t segIndex, uint32_t tableIndex) {
/* static */ int32_t Instance::tableInit(Instance* instance, uint32_t dstOffset,
uint32_t srcOffset, uint32_t len,
uint32_t segIndex,
uint32_t tableIndex) {
MOZ_ASSERT(SASigTableInit.failureMode == FailureMode::FailOnNegI32);
MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveElemSegments_.length(),
"ensured by validation");
@ -803,7 +839,7 @@ Instance::tableInit(Instance* instance, uint32_t dstOffset, uint32_t srcOffset,
// Element segments cannot currently contain arbitrary values, and anyref
// tables cannot be initialized from segments.
MOZ_ASSERT(table.kind() == TableKind::AnyFunction);
MOZ_ASSERT(table.kind() == TableKind::FuncRef);
// We are proposing to copy
//
@ -851,9 +887,11 @@ Instance::tableInit(Instance* instance, uint32_t dstOffset, uint32_t srcOffset,
return -1;
}
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::tableFill(Instance* instance, uint32_t start, void* value,
uint32_t len, uint32_t tableIndex) {
/* static */ int32_t Instance::tableFill(Instance* instance, uint32_t start,
void* value, uint32_t len,
uint32_t tableIndex) {
MOZ_ASSERT(SASigTableFill.failureMode == FailureMode::FailOnNegI32);
Table& table = *instance->tables()[tableIndex];
MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef);
@ -897,31 +935,23 @@ Instance::tableFill(Instance* instance, uint32_t start, void* value,
return -1;
}
// The return convention for tableGet() is awkward but avoids a situation where
// Ion code has to hold a value that may or may not be a pointer to GC'd
// storage, or where Ion has to pass in a pointer to storage where a return
// value can be written.
//
// Note carefully that the pointer that is returned may not be valid past
// operations that change the size of the table or cause GC work; it is strictly
// to be used to retrieve the return value.
/* static */ void* /* nullptr to signal trap; pointer to table location
otherwise */
Instance::tableGet(Instance* instance, uint32_t index, uint32_t tableIndex) {
/* static */ void* Instance::tableGet(Instance* instance, uint32_t index,
uint32_t tableIndex) {
MOZ_ASSERT(SASigTableGet.failureMode == FailureMode::FailOnInvalidRef);
const Table& table = *instance->tables()[tableIndex];
MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef);
if (index >= table.length()) {
JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr,
JSMSG_WASM_TABLE_OUT_OF_BOUNDS);
return nullptr;
return AnyRef::invalid().forCompiledCode();
}
return const_cast<void*>(table.getShortlivedAnyRefLocForCompiledCode(index));
return table.getAnyRef(index).forCompiledCode();
}
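
tableGet's new contract removes the old short-lived pointer-into-table-storage return (and with it the GC hazard described in the deleted comment): the boxed ref itself is returned as a word, and because null is a legal table entry the out-of-bounds case is signalled with AnyRef::invalid() and caught by the FailOnInvalidRef check. A standalone restatement, with the sentinel value assumed for illustration:

#include <cstdint>
#include <vector>

using EncodedRef = void*;  // stand-in for AnyRef::forCompiledCode() bits

// Assumed sentinel; the real one is AnyRef::invalid().forCompiledCode().
static EncodedRef const kInvalidRef = reinterpret_cast<EncodedRef>(~uintptr_t(0));

EncodedRef sketchTableGet(const std::vector<EncodedRef>& table, uint32_t index) {
  if (index >= table.size()) {
    // A RangeError has been reported; the sentinel makes the jitted caller
    // branch to the throw stub.
    return kInvalidRef;
  }
  return table[index];  // may legitimately be nullptr (a null ref)
}
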
/* static */ uint32_t /* infallible */
Instance::tableGrow(Instance* instance, void* initValue, uint32_t delta,
uint32_t tableIndex) {
/* static */ uint32_t Instance::tableGrow(Instance* instance, void* initValue,
uint32_t delta, uint32_t tableIndex) {
MOZ_ASSERT(SASigTableGrow.failureMode == FailureMode::Infallible);
RootedAnyRef obj(TlsContext.get(), AnyRef::fromCompiledCode(initValue));
Table& table = *instance->tables()[tableIndex];
MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef);
@ -935,9 +965,10 @@ Instance::tableGrow(Instance* instance, void* initValue, uint32_t delta,
return oldSize;
}
/* static */ int32_t /* -1 to signal trap; 0 for ok */
Instance::tableSet(Instance* instance, uint32_t index, void* value,
uint32_t tableIndex) {
/* static */ int32_t Instance::tableSet(Instance* instance, uint32_t index,
void* value, uint32_t tableIndex) {
MOZ_ASSERT(SASigTableSet.failureMode == FailureMode::FailOnNegI32);
Table& table = *instance->tables()[tableIndex];
MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef);
if (index >= table.length()) {
@ -949,20 +980,23 @@ Instance::tableSet(Instance* instance, uint32_t index, void* value,
return 0;
}
/* static */ uint32_t /* infallible */
Instance::tableSize(Instance* instance, uint32_t tableIndex) {
/* static */ uint32_t Instance::tableSize(Instance* instance,
uint32_t tableIndex) {
MOZ_ASSERT(SASigTableSize.failureMode == FailureMode::Infallible);
Table& table = *instance->tables()[tableIndex];
return table.length();
}
/* static */ void /* infallible */
Instance::postBarrier(Instance* instance, gc::Cell** location) {
/* static */ void Instance::postBarrier(Instance* instance,
gc::Cell** location) {
MOZ_ASSERT(SASigPostBarrier.failureMode == FailureMode::Infallible);
MOZ_ASSERT(location);
TlsContext.get()->runtime()->gc.storeBuffer().putCell(location);
}
/* static */ void /* infallible */
Instance::postBarrierFiltering(Instance* instance, gc::Cell** location) {
/* static */ void Instance::postBarrierFiltering(Instance* instance,
gc::Cell** location) {
MOZ_ASSERT(SASigPostBarrierFiltering.failureMode == FailureMode::Infallible);
MOZ_ASSERT(location);
if (*location == nullptr || !gc::IsInsideNursery(*location)) {
return;
@ -976,16 +1010,19 @@ Instance::postBarrierFiltering(Instance* instance, gc::Cell** location) {
// When we fail to allocate we return a nullptr; the wasm side must check this
// and propagate it as an error.
/* static */ void* /* null on OOM, otherwise a pointer */
Instance::structNew(Instance* instance, uint32_t typeIndex) {
/* static */ void* Instance::structNew(Instance* instance, uint32_t typeIndex) {
MOZ_ASSERT(SASigStructNew.failureMode == FailureMode::FailOnNullPtr);
JSContext* cx = TlsContext.get();
Rooted<TypeDescr*> typeDescr(cx, instance->structTypeDescrs_[typeIndex]);
return TypedObject::createZeroed(cx, typeDescr);
}
/* static */ void* /* infallible */
Instance::structNarrow(Instance* instance, uint32_t mustUnboxAnyref,
uint32_t outputTypeIndex, void* maybeNullPtr) {
/* static */ void* Instance::structNarrow(Instance* instance,
uint32_t mustUnboxAnyref,
uint32_t outputTypeIndex,
void* maybeNullPtr) {
MOZ_ASSERT(SASigStructNarrow.failureMode == FailureMode::Infallible);
JSContext* cx = TlsContext.get();
Rooted<TypedObject*> obj(cx);
@ -1059,7 +1096,7 @@ Instance::structNarrow(Instance* instance, uint32_t mustUnboxAnyref,
// Either the written location is in the global data section in the
// WasmInstanceObject, or the Cell of a WasmGlobalObject:
//
// - WasmInstanceObjects are always tenured and u.ref_/anyref_ may point to a
// - WasmInstanceObjects are always tenured and u.ref_ may point to a
// nursery object, so we need a post-barrier since the global data of an
// instance is effectively a field of the WasmInstanceObject.
//
@ -1088,33 +1125,22 @@ void CopyValPostBarriered(uint8_t* dst, const Val& src) {
memcpy(dst, &x, sizeof(x));
break;
}
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef: {
// TODO/AnyRef-boxing: With boxed immediates and strings, the write
// barrier is going to have to be more complicated.
ASSERT_ANYREF_IS_JSOBJECT;
MOZ_ASSERT(*(void**)dst == nullptr,
"should be null so no need for a pre-barrier");
AnyRef x = src.anyref();
memcpy(dst, x.asJSObjectAddress(), sizeof(x));
AnyRef x = src.ref();
memcpy(dst, x.asJSObjectAddress(), sizeof(*x.asJSObjectAddress()));
if (!x.isNull()) {
JSObject::writeBarrierPost((JSObject**)dst, nullptr, x.asJSObject());
}
break;
}
case ValType::Ref: {
MOZ_ASSERT(*(JSObject**)dst == nullptr,
"should be null so no need for a pre-barrier");
JSObject* x = src.ref();
memcpy(dst, &x, sizeof(x));
if (x) {
JSObject::writeBarrierPost((JSObject**)dst, nullptr, x);
}
break;
}
case ValType::NullRef: {
break;
}
default: {
MOZ_CRASH("unexpected Val type");
}
}
@ -1402,13 +1428,13 @@ void Instance::tracePrivate(JSTracer* trc) {
}
for (const GlobalDesc& global : code().metadata().globals) {
// Indirect anyref global get traced by the owning WebAssembly.Global.
// Indirect reference globals get traced by the owning WebAssembly.Global.
if (!global.type().isReference() || global.isConstant() ||
global.isIndirect()) {
continue;
}
GCPtrObject* obj = (GCPtrObject*)(globalData() + global.offset());
TraceNullableEdge(trc, obj, "wasm ref/anyref global");
TraceNullableEdge(trc, obj, "wasm reference-typed global");
}
TraceNullableEdge(trc, &memory_, "wasm buffer");
@ -1656,7 +1682,7 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
}
ASSERT_ANYREF_IS_JSOBJECT;
Rooted<GCVector<JSObject*, 8, SystemAllocPolicy>> anyrefs(cx);
Rooted<GCVector<JSObject*, 8, SystemAllocPolicy>> refs(cx);
DebugCodegen(DebugChannel::Function, "wasm-function[%d]; arguments ",
funcIndex);
@ -1666,7 +1692,6 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
switch (funcType->arg(i).code()) {
case ValType::I32:
if (!ToInt32(cx, v, (int32_t*)&exportArgs[i])) {
DebugCodegen(DebugChannel::Function, "call to ToInt32 failed!\n");
return false;
}
DebugCodegen(DebugChannel::Function, "i32(%d) ",
@ -1676,8 +1701,6 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
MOZ_CRASH("unexpected i64 flowing into callExport");
case ValType::F32:
if (!RoundFloat32(cx, v, (float*)&exportArgs[i])) {
DebugCodegen(DebugChannel::Function,
"call to RoundFloat32 failed!\n");
return false;
}
DebugCodegen(DebugChannel::Function, "f32(%f) ",
@ -1685,7 +1708,6 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
break;
case ValType::F64:
if (!ToNumber(cx, v, (double*)&exportArgs[i])) {
DebugCodegen(DebugChannel::Function, "call to ToNumber failed!\n");
return false;
}
DebugCodegen(DebugChannel::Function, "f64(%lf) ",
@ -1693,20 +1715,32 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
break;
case ValType::Ref:
MOZ_CRASH("temporarily unsupported Ref type in callExport");
case ValType::AnyRef: {
RootedAnyRef ar(cx, AnyRef::null());
if (!BoxAnyRef(cx, v, &ar)) {
DebugCodegen(DebugChannel::Function, "call to BoxAnyRef failed!\n");
case ValType::FuncRef: {
RootedFunction fun(cx);
if (!CheckFuncRefValue(cx, v, &fun)) {
return false;
}
// We'll copy the value into the arguments array just before the call;
// for now tuck the value away in a rooted array.
// Store in rooted array until no more GC is possible.
ASSERT_ANYREF_IS_JSOBJECT;
if (!anyrefs.emplaceBack(ar.get().asJSObject())) {
if (!refs.emplaceBack(fun)) {
return false;
}
DebugCodegen(DebugChannel::Function, "ptr(#%d) ",
int(anyrefs.length() - 1));
int(refs.length() - 1));
break;
}
case ValType::AnyRef: {
RootedAnyRef ar(cx, AnyRef::null());
if (!BoxAnyRef(cx, v, &ar)) {
return false;
}
// Store in rooted array until no more GC is possible.
ASSERT_ANYREF_IS_JSOBJECT;
if (!refs.emplaceBack(ar.get().asJSObject())) {
return false;
}
DebugCodegen(DebugChannel::Function, "ptr(#%d) ",
int(refs.length() - 1));
break;
}
case ValType::NullRef: {
@ -1718,18 +1752,18 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
DebugCodegen(DebugChannel::Function, "\n");
// Copy over reference values from the rooted array, if any.
if (anyrefs.length() > 0) {
if (refs.length() > 0) {
DebugCodegen(DebugChannel::Function, "; ");
size_t nextRef = 0;
for (size_t i = 0; i < funcType->args().length(); ++i) {
if (funcType->arg(i).isReference()) {
ASSERT_ANYREF_IS_JSOBJECT;
*(void**)&exportArgs[i] = (void*)anyrefs[nextRef++];
*(void**)&exportArgs[i] = (void*)refs[nextRef++];
DebugCodegen(DebugChannel::Function, "ptr(#%d) = %p ", int(nextRef - 1),
*(void**)&exportArgs[i]);
}
}
anyrefs.clear();
refs.clear();
}
{
@ -1783,6 +1817,7 @@ bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args) {
break;
case ExprType::Ref:
MOZ_CRASH("temporarily unsupported Ref type in callExport");
case ExprType::FuncRef:
case ExprType::AnyRef:
args.rval().set(UnboxAnyRef(AnyRef::fromCompiledCode(*(void**)retAddr)));
DebugCodegen(DebugChannel::Function, "ptr(%p)", *(void**)retAddr);
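
For reference arguments, callExport now follows the same two-phase pattern for both funcref and anyref: check or box the JS value first, park the resulting object in the rooted refs vector while coercion of later arguments may still GC, and only then copy the raw pointers into the flat exportArgs array that the entry stub reads. A stand-in sketch of that final copy step (the types are illustrative, not SpiderMonkey's):

#include <cstddef>
#include <cstdint>
#include <vector>

struct SketchGCThing {};  // stands in for a JSObject kept alive by the rooted vector

void copyRefsIntoArgv(const std::vector<SketchGCThing*>& rootedRefs,
                      const std::vector<bool>& argIsReference,
                      std::vector<uint64_t>& exportArgs) {
  size_t nextRef = 0;
  for (size_t i = 0; i < argIsReference.size(); i++) {
    if (argIsReference[i]) {
      // No further GC can happen between here and the call, so raw pointers
      // are safe to hand to compiled code.
      exportArgs[i] = uint64_t(reinterpret_cast<uintptr_t>(rootedRefs[nextRef++]));
    }
  }
}
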

Просмотреть файл

@ -182,6 +182,7 @@ class Instance {
static int32_t callImport_i64(Instance*, int32_t, int32_t, uint64_t*);
static int32_t callImport_f64(Instance*, int32_t, int32_t, uint64_t*);
static int32_t callImport_anyref(Instance*, int32_t, int32_t, uint64_t*);
static int32_t callImport_funcref(Instance*, int32_t, int32_t, uint64_t*);
static uint32_t memoryGrow_i32(Instance* instance, uint32_t delta);
static uint32_t memorySize_i32(Instance* instance);
static int32_t wait_i32(Instance* instance, uint32_t byteOffset,

Просмотреть файл

@ -183,6 +183,7 @@ class FunctionCompiler {
ins = MConstant::New(alloc(), DoubleValue(0.0), MIRType::Double);
break;
case ValType::Ref:
case ValType::FuncRef:
case ValType::AnyRef:
ins = MWasmNullConstant::New(alloc());
break;
@ -680,66 +681,6 @@ class FunctionCompiler {
return ins;
}
bool checkI32NegativeMeansFailedResult(MDefinition* value) {
if (inDeadCode()) {
return true;
}
auto* zero = constant(Int32Value(0), MIRType::Int32);
auto* cond = compare(value, zero, JSOP_LT, MCompare::Compare_Int32);
MBasicBlock* failBlock;
if (!newBlock(curBlock_, &failBlock)) {
return false;
}
MBasicBlock* okBlock;
if (!newBlock(curBlock_, &okBlock)) {
return false;
}
curBlock_->end(MTest::New(alloc(), cond, failBlock, okBlock));
failBlock->end(
MWasmTrap::New(alloc(), wasm::Trap::ThrowReported, bytecodeOffset()));
curBlock_ = okBlock;
return true;
}
bool checkPointerNullMeansFailedResult(MDefinition* value) {
if (inDeadCode()) {
return true;
}
auto* cond = MIsNullPointer::New(alloc(), value);
curBlock_->add(cond);
MBasicBlock* failBlock;
if (!newBlock(curBlock_, &failBlock)) {
return false;
}
MBasicBlock* okBlock;
if (!newBlock(curBlock_, &okBlock)) {
return false;
}
curBlock_->end(MTest::New(alloc(), cond, failBlock, okBlock));
failBlock->end(
MWasmTrap::New(alloc(), wasm::Trap::ThrowReported, bytecodeOffset()));
curBlock_ = okBlock;
return true;
}
MDefinition* derefTableElementPointer(MDefinition* base) {
// Table element storage may be moved by GC operations, so reads from that
// storage are not movable.
MWasmLoadRef* load =
MWasmLoadRef::New(alloc(), base, AliasSet::WasmTableElement,
/*isMovable=*/false);
curBlock_->add(load);
return load;
}
MDefinition* load(MDefinition* base, MemoryAccessDesc* access,
ValType result) {
if (inDeadCode()) {
@ -1157,6 +1098,8 @@ class FunctionCompiler {
return true;
}
MOZ_ASSERT(builtin.failureMode == FailureMode::Infallible);
CallSiteDesc desc(lineOrBytecode, CallSiteDesc::Symbolic);
auto callee = CalleeDesc::builtin(builtin.identity);
auto* ins =
@ -1174,22 +1117,27 @@ class FunctionCompiler {
bool builtinInstanceMethodCall(const SymbolicAddressSignature& builtin,
uint32_t lineOrBytecode,
const CallCompileState& call,
MDefinition** def) {
MDefinition** def = nullptr) {
MOZ_ASSERT_IF(!def, builtin.retType == MIRType::None);
if (inDeadCode()) {
*def = nullptr;
if (def) {
*def = nullptr;
}
return true;
}
CallSiteDesc desc(lineOrBytecode, CallSiteDesc::Symbolic);
auto* ins = MWasmCall::NewBuiltinInstanceMethodCall(
alloc(), desc, builtin.identity, call.instanceArg_, call.regArgs_,
builtin.retType, StackArgAreaSizeUnaligned(builtin));
alloc(), desc, builtin.identity, builtin.failureMode, call.instanceArg_,
call.regArgs_, builtin.retType, StackArgAreaSizeUnaligned(builtin));
if (!ins) {
return false;
}
curBlock_->add(ins);
*def = ins;
if (def) {
*def = ins;
}
return true;
}
@ -2183,8 +2131,9 @@ static bool EmitGetGlobal(FunctionCompiler& f) {
case ValType::F64:
result = f.constant(value.f64());
break;
case ValType::FuncRef:
case ValType::AnyRef:
MOZ_ASSERT(value.anyref().isNull());
MOZ_ASSERT(value.ref().isNull());
result = f.nullRefConstant();
break;
default:
@ -2223,8 +2172,7 @@ static bool EmitSetGlobal(FunctionCompiler& f) {
return false;
}
f.finishCall(&args);
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args)) {
return false;
}
}
@ -2848,10 +2796,6 @@ static bool EmitWait(FunctionCompiler& f, ValType type, uint32_t byteSize) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
f.iter().setResult(ret);
return true;
}
@ -2895,10 +2839,6 @@ static bool EmitWake(FunctionCompiler& f) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
f.iter().setResult(ret);
return true;
}
@ -2974,16 +2914,7 @@ static bool EmitMemOrTableCopy(FunctionCompiler& f, bool isMem) {
return false;
}
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
return true;
return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
}
static bool EmitDataOrElemDrop(FunctionCompiler& f, bool isData) {
@ -3015,16 +2946,7 @@ static bool EmitDataOrElemDrop(FunctionCompiler& f, bool isData) {
return false;
}
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
return true;
return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
}
static bool EmitMemFill(FunctionCompiler& f) {
@ -3059,16 +2981,7 @@ static bool EmitMemFill(FunctionCompiler& f) {
return false;
}
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
return true;
return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
}
static bool EmitMemOrTableInit(FunctionCompiler& f, bool isMem) {
@ -3120,16 +3033,7 @@ static bool EmitMemOrTableInit(FunctionCompiler& f, bool isMem) {
return false;
}
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
return true;
return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
}
#endif // ENABLE_WASM_BULKMEM_OPS
@ -3179,16 +3083,7 @@ static bool EmitTableFill(FunctionCompiler& f) {
return false;
}
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
return true;
return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
}
static bool EmitTableGet(FunctionCompiler& f) {
@ -3229,16 +3124,8 @@ static bool EmitTableGet(FunctionCompiler& f) {
// The builtin returns the requested ref directly; it may legitimately be
// null, so an out-of-bounds access is signalled with an invalid ref and is
// handled by SASigTableGet's FailOnInvalidRef check.
MDefinition* result;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &result)) {
return false;
}
if (!f.checkPointerNullMeansFailedResult(result)) {
return false;
}
MDefinition* ret = f.derefTableElementPointer(result);
if (!ret) {
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
return false;
}
@ -3337,14 +3224,7 @@ static bool EmitTableSet(FunctionCompiler& f) {
return false;
}
MDefinition* ret;
if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) {
return false;
}
if (!f.checkI32NegativeMeansFailedResult(ret)) {
return false;
}
return true;
return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
}
static bool EmitTableSize(FunctionCompiler& f) {

Просмотреть файл

@ -178,12 +178,20 @@ static bool ToWebAssemblyValue(JSContext* cx, ValType targetType, HandleValue v,
val.set(Val(d));
return true;
}
case ValType::FuncRef: {
RootedFunction fun(cx);
if (!CheckFuncRefValue(cx, v, &fun)) {
return false;
}
val.set(Val(ValType::FuncRef, AnyRef::fromJSObject(fun)));
return true;
}
case ValType::AnyRef: {
RootedAnyRef tmp(cx, AnyRef::null());
if (!BoxAnyRef(cx, v, &tmp)) {
return false;
}
val.set(Val(tmp));
val.set(Val(ValType::AnyRef, tmp));
return true;
}
case ValType::Ref:
@ -203,8 +211,9 @@ static Value ToJSValue(const Val& val) {
return DoubleValue(JS::CanonicalizeNaN(double(val.f32())));
case ValType::F64:
return DoubleValue(JS::CanonicalizeNaN(val.f64()));
case ValType::FuncRef:
case ValType::AnyRef:
return UnboxAnyRef(val.anyref());
return UnboxAnyRef(val.ref());
case ValType::Ref:
case ValType::NullRef:
case ValType::I64:
@ -1536,6 +1545,29 @@ bool wasm::IsWasmExportedFunction(JSFunction* fun) {
return fun->kind() == JSFunction::Wasm;
}
bool wasm::CheckFuncRefValue(JSContext* cx, HandleValue v,
MutableHandleFunction fun) {
if (v.isNull()) {
MOZ_ASSERT(!fun);
return true;
}
if (v.isObject()) {
JSObject& obj = v.toObject();
if (obj.is<JSFunction>()) {
JSFunction* f = &obj.as<JSFunction>();
if (IsWasmExportedFunction(f)) {
fun.set(f);
return true;
}
}
}
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_FUNCREF_VALUE);
return false;
}
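
CheckFuncRefValue is the single choke point for every place a funcref now enters from JS (imports that return funcref, Table.prototype.set and grow on "funcref" tables, funcref globals via ToWebAssemblyValue): the only accepted values are null and exported WebAssembly functions, and anything else raises JSMSG_WASM_BAD_FUNCREF_VALUE. Restated as a standalone predicate over stand-in value tags:

// Stand-in tags for the JS values the real helper inspects.
enum class JsValueTag { Null, WasmExportedFunction, PlainFunction, OtherObject, Primitive };

// The coercion rule above as a pure predicate: anything not accepted here
// makes CheckFuncRefValue report JSMSG_WASM_BAD_FUNCREF_VALUE and fail.
inline bool acceptableFuncRefValue(JsValueTag v) {
  return v == JsValueTag::Null || v == JsValueTag::WasmExportedFunction;
}
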
Instance& wasm::ExportedFunctionToInstance(JSFunction* fun) {
return ExportedFunctionToInstanceObject(fun)->instance();
}
@ -2006,7 +2038,7 @@ bool WasmTableObject::construct(JSContext* cx, unsigned argc, Value* vp) {
TableKind tableKind;
if (StringEqualsAscii(elementLinearStr, "anyfunc") ||
StringEqualsAscii(elementLinearStr, "funcref")) {
tableKind = TableKind::AnyFunction;
tableKind = TableKind::FuncRef;
#ifdef ENABLE_WASM_REFTYPES
} else if (StringEqualsAscii(elementLinearStr, "anyref")) {
if (!HasReftypesSupport(cx)) {
@ -2095,8 +2127,8 @@ bool WasmTableObject::getImpl(JSContext* cx, const CallArgs& args) {
}
switch (table.kind()) {
case TableKind::AnyFunction: {
const FunctionTableElem& elem = table.getAnyFunc(index);
case TableKind::FuncRef: {
const FunctionTableElem& elem = table.getFuncRef(index);
if (!elem.code) {
args.rval().setNull();
return true;
@ -2134,6 +2166,13 @@ bool WasmTableObject::get(JSContext* cx, unsigned argc, Value* vp) {
static void TableFunctionFill(JSContext* cx, Table* table, HandleFunction value,
uint32_t index, uint32_t limit) {
if (!value) {
while (index < limit) {
table->setNull(index++);
}
return;
}
RootedWasmInstanceObject instanceObj(cx,
ExportedFunctionToInstanceObject(value));
uint32_t funcIndex = ExportedFunctionToFuncIndex(value);
@ -2151,24 +2190,10 @@ static void TableFunctionFill(JSContext* cx, Table* table, HandleFunction value,
metadata.codeRange(metadata.lookupFuncExport(funcIndex));
void* code = instance.codeBase(tier) + codeRange.funcTableEntry();
while (index < limit) {
table->setAnyFunc(index++, code, &instance);
table->setFuncRef(index++, code, &instance);
}
}
static bool IsWasmExportedFunction(const Value& v, MutableHandleFunction f) {
if (!v.isObject()) {
return false;
}
JSObject& obj = v.toObject();
if (!obj.is<JSFunction>() || !IsWasmExportedFunction(&obj.as<JSFunction>())) {
return false;
}
f.set(&obj.as<JSFunction>());
return true;
}
/* static */
bool WasmTableObject::setImpl(JSContext* cx, const CallArgs& args) {
RootedWasmTableObject tableObj(
@ -2186,21 +2211,14 @@ bool WasmTableObject::setImpl(JSContext* cx, const CallArgs& args) {
RootedValue fillValue(cx, args[1]);
switch (table.kind()) {
case TableKind::AnyFunction: {
RootedFunction value(cx);
if (!IsWasmExportedFunction(fillValue, &value) && !fillValue.isNull()) {
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_TABLE_VALUE);
case TableKind::FuncRef: {
RootedFunction fun(cx);
if (!CheckFuncRefValue(cx, fillValue, &fun)) {
return false;
}
if (value) {
MOZ_ASSERT(index < MaxTableLength);
static_assert(MaxTableLength < UINT32_MAX, "Invariant");
TableFunctionFill(cx, &table, value, index, index + 1);
} else {
table.setNull(index);
}
MOZ_ASSERT(index < MaxTableLength);
static_assert(MaxTableLength < UINT32_MAX, "Invariant");
TableFunctionFill(cx, &table, fun, index, index + 1);
break;
}
case TableKind::AnyRef: {
@ -2260,21 +2278,20 @@ bool WasmTableObject::growImpl(JSContext* cx, const CallArgs& args) {
static_assert(MaxTableLength < UINT32_MAX, "Invariant");
switch (table->table().kind()) {
case TableKind::AnyFunction: {
RootedFunction value(cx);
case TableKind::FuncRef: {
if (fillValue.isNull()) {
#ifdef DEBUG
for (uint32_t index = oldLength; index < oldLength + delta; index++) {
MOZ_ASSERT(table->table().getAnyFunc(index).code == nullptr);
MOZ_ASSERT(table->table().getFuncRef(index).code == nullptr);
}
#endif
} else if (IsWasmExportedFunction(fillValue, &value)) {
TableFunctionFill(cx, &table->table(), value, oldLength,
oldLength + delta);
} else {
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_TBL_GROW_INIT, "funcref");
return false;
RootedFunction fun(cx);
if (!CheckFuncRefValue(cx, fillValue, &fun)) {
return false;
}
TableFunctionFill(cx, &table->table(), fun, oldLength,
oldLength + delta);
}
break;
}
@ -2353,14 +2370,14 @@ void WasmGlobalObject::trace(JSTracer* trc, JSObject* obj) {
return;
}
switch (global->type().code()) {
case ValType::FuncRef:
case ValType::AnyRef:
if (!global->cell()->anyref.isNull()) {
if (!global->cell()->ref.isNull()) {
// TODO/AnyRef-boxing: With boxed immediates and strings, the write
// barrier is going to have to be more complicated.
ASSERT_ANYREF_IS_JSOBJECT;
TraceManuallyBarrieredEdge(trc,
global->cell()->anyref.asJSObjectAddress(),
"wasm anyref global");
TraceManuallyBarrieredEdge(trc, global->cell()->ref.asJSObjectAddress(),
"wasm reference-typed global");
}
break;
case ValType::I32:
@ -2422,22 +2439,22 @@ WasmGlobalObject* WasmGlobalObject::create(JSContext* cx, HandleVal hval,
case ValType::F64:
cell->f64 = val.f64();
break;
case ValType::NullRef:
MOZ_ASSERT(!cell->ref, "value should be null already");
break;
case ValType::FuncRef:
case ValType::AnyRef:
MOZ_ASSERT(cell->anyref.isNull(), "no prebarriers needed");
cell->anyref = val.anyref();
if (!cell->anyref.isNull()) {
MOZ_ASSERT(cell->ref.isNull(), "no prebarriers needed");
cell->ref = val.ref();
if (!cell->ref.isNull()) {
// TODO/AnyRef-boxing: With boxed immediates and strings, the write
// barrier is going to have to be more complicated.
ASSERT_ANYREF_IS_JSOBJECT;
JSObject::writeBarrierPost(&cell->anyref, nullptr,
cell->anyref.asJSObject());
JSObject::writeBarrierPost(cell->ref.asJSObjectAddress(), nullptr,
cell->ref.asJSObject());
}
break;
case ValType::Ref:
MOZ_CRASH("Ref NYI");
case ValType::NullRef:
MOZ_CRASH("NullRef not expressible");
}
obj->initReservedSlot(TYPE_SLOT,
@ -2504,6 +2521,9 @@ bool WasmGlobalObject::construct(JSContext* cx, unsigned argc, Value* vp) {
} else if (StringEqualsAscii(typeLinearStr, "f64")) {
globalType = ValType::F64;
#ifdef ENABLE_WASM_REFTYPES
} else if (HasReftypesSupport(cx) &&
StringEqualsAscii(typeLinearStr, "funcref")) {
globalType = ValType::FuncRef;
} else if (HasReftypesSupport(cx) &&
StringEqualsAscii(typeLinearStr, "anyref")) {
globalType = ValType::AnyRef;
@ -2533,8 +2553,11 @@ bool WasmGlobalObject::construct(JSContext* cx, unsigned argc, Value* vp) {
case ValType::F64:
globalVal = Val(double(0.0));
break;
case ValType::FuncRef:
globalVal = Val(ValType::FuncRef, AnyRef::null());
break;
case ValType::AnyRef:
globalVal = Val(AnyRef::null());
globalVal = Val(ValType::AnyRef, AnyRef::null());
break;
case ValType::Ref:
MOZ_CRASH("Ref NYI");
@ -2545,7 +2568,7 @@ bool WasmGlobalObject::construct(JSContext* cx, unsigned argc, Value* vp) {
// Override with non-undefined value, if provided.
RootedValue valueVal(cx, args.get(1));
if (!valueVal.isUndefined() ||
(args.length() >= 2 && globalType == ValType::AnyRef)) {
(args.length() >= 2 && globalType.isReference())) {
if (!ToWebAssemblyValue(cx, globalType, valueVal, &globalVal)) {
return false;
}
@ -2570,6 +2593,7 @@ bool WasmGlobalObject::valueGetterImpl(JSContext* cx, const CallArgs& args) {
case ValType::I32:
case ValType::F32:
case ValType::F64:
case ValType::FuncRef:
case ValType::AnyRef:
args.rval().set(args.thisv().toObject().as<WasmGlobalObject>().value(cx));
return true;
@ -2627,17 +2651,18 @@ bool WasmGlobalObject::valueSetterImpl(JSContext* cx, const CallArgs& args) {
case ValType::F64:
cell->f64 = val.get().f64();
break;
case ValType::FuncRef:
case ValType::AnyRef: {
AnyRef prevPtr = cell->anyref;
AnyRef prevPtr = cell->ref;
// TODO/AnyRef-boxing: With boxed immediates and strings, the write
// barrier is going to have to be more complicated.
ASSERT_ANYREF_IS_JSOBJECT;
JSObject::writeBarrierPre(prevPtr.asJSObject());
cell->anyref = val.get().anyref();
if (!cell->anyref.isNull()) {
JSObject::writeBarrierPost(cell->anyref.asJSObjectAddress(),
cell->ref = val.get().ref();
if (!cell->ref.isNull()) {
JSObject::writeBarrierPost(cell->ref.asJSObjectAddress(),
prevPtr.asJSObject(),
cell->anyref.asJSObject());
cell->ref.asJSObject());
}
break;
}
@ -2693,8 +2718,11 @@ void WasmGlobalObject::val(MutableHandleVal outval) const {
case ValType::F64:
outval.set(Val(cell->f64));
return;
case ValType::FuncRef:
outval.set(Val(ValType::FuncRef, cell->ref));
return;
case ValType::AnyRef:
outval.set(Val(cell->anyref));
outval.set(Val(ValType::AnyRef, cell->ref));
return;
case ValType::Ref:
MOZ_CRASH("Ref NYI");

Просмотреть файл

@ -102,13 +102,14 @@ MOZ_MUST_USE bool DeserializeModule(JSContext* cx, const Bytes& serialized,
// functions for extracting the instance and func-index of a wasm function
// can be used for both wasm and asm.js, however.
extern bool IsWasmExportedFunction(JSFunction* fun);
bool IsWasmExportedFunction(JSFunction* fun);
bool CheckFuncRefValue(JSContext* cx, HandleValue v, MutableHandleFunction fun);
extern Instance& ExportedFunctionToInstance(JSFunction* fun);
extern WasmInstanceObject* ExportedFunctionToInstanceObject(JSFunction* fun);
extern uint32_t ExportedFunctionToFuncIndex(JSFunction* fun);
Instance& ExportedFunctionToInstance(JSFunction* fun);
WasmInstanceObject* ExportedFunctionToInstanceObject(JSFunction* fun);
uint32_t ExportedFunctionToFuncIndex(JSFunction* fun);
extern bool IsSharedWasmMemoryObject(JSObject* obj);
bool IsSharedWasmMemoryObject(JSObject* obj);
} // namespace wasm
@ -176,8 +177,7 @@ class WasmGlobalObject : public NativeObject {
int64_t i64;
float f32;
double f64;
JSObject* ref; // Note, this breaks an abstraction boundary
wasm::AnyRef anyref;
wasm::AnyRef ref;
Cell() : i64(0) {}
~Cell() {}
};

Просмотреть файл

@ -1228,6 +1228,7 @@ static bool MakeStructField(JSContext* cx, const ValType& v, bool isMutable,
t = GlobalObject::getOrCreateReferenceTypeDescr(
cx, cx->global(), ReferenceType::TYPE_OBJECT);
break;
case ValType::FuncRef:
case ValType::AnyRef:
t = GlobalObject::getOrCreateReferenceTypeDescr(
cx, cx->global(), ReferenceType::TYPE_WASM_ANYREF);

Просмотреть файл

@ -46,6 +46,7 @@ class StackType {
case TypeCode::F32:
case TypeCode::F64:
case TypeCode::AnyRef:
case TypeCode::FuncRef:
case TypeCode::Ref:
case TypeCode::NullRef:
case TypeCode::Limit:
@ -64,6 +65,7 @@ class StackType {
F64 = uint8_t(ValType::F64),
AnyRef = uint8_t(ValType::AnyRef),
FuncRef = uint8_t(ValType::FuncRef),
Ref = uint8_t(ValType::Ref),
NullRef = uint8_t(ValType::NullRef),
@ -83,24 +85,16 @@ class StackType {
Code code() const { return Code(UnpackTypeCodeType(tc_)); }
uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); }
bool isRef() const { return UnpackTypeCodeType(tc_) == TypeCode::Ref; }
bool isReference() const {
TypeCode tc = UnpackTypeCodeType(tc_);
return tc == TypeCode::Ref || tc == TypeCode::AnyRef ||
tc == TypeCode::NullRef;
}
bool isReference() const { return IsReferenceType(tc_); }
bool operator==(const StackType& that) const { return tc_ == that.tc_; }
bool operator!=(const StackType& that) const { return tc_ != that.tc_; }
bool operator==(Code that) const {
MOZ_ASSERT(that != Code::Ref);
return code() == that;
}
bool operator!=(Code that) const { return !(*this == that); }
};
@ -765,6 +759,7 @@ inline bool OpIter<Policy>::readBlockType(ExprType* type) {
case uint8_t(ExprType::F64):
known = true;
break;
case uint8_t(ExprType::FuncRef):
case uint8_t(ExprType::AnyRef):
#ifdef ENABLE_WASM_REFTYPES
known = true;
@ -1564,7 +1559,7 @@ inline bool OpIter<Policy>::readCallIndirect(uint32_t* funcTypeIndex,
}
return fail("table index out of range for call_indirect");
}
if (env_.tables[*tableIndex].kind != TableKind::AnyFunction) {
if (env_.tables[*tableIndex].kind != TableKind::FuncRef) {
return fail("indirect calls must go through a table of 'funcref'");
}
@ -1955,7 +1950,7 @@ inline bool OpIter<Policy>::readMemOrTableInit(bool isMem, uint32_t* segIndex,
// Element segments must carry functions exclusively and funcref is not
// yet a subtype of anyref.
if (env_.tables[*dstTableIndex].kind != TableKind::AnyFunction) {
if (env_.tables[*dstTableIndex].kind != TableKind::FuncRef) {
return fail("only tables of 'funcref' may have element segments");
}
if (*segIndex >= env_.elemSegments.length()) {

Просмотреть файл

@ -312,6 +312,7 @@ static void StoreABIReturn(MacroAssembler& masm, const FuncExport& fe,
masm.storeDouble(ReturnDoubleReg, Address(argv, 0));
break;
case ExprType::Ref:
case ExprType::FuncRef:
case ExprType::AnyRef:
masm.storePtr(ReturnReg, Address(argv, 0));
break;
@ -900,10 +901,9 @@ static bool GenerateJitEntry(MacroAssembler& masm, size_t funcExportIndex,
break;
}
case ExprType::Ref:
MOZ_CRASH("return ref in jitentry NYI");
break;
case ExprType::FuncRef:
case ExprType::AnyRef:
MOZ_CRASH("return anyref in jitentry NYI");
MOZ_CRASH("returning reference in jitentry NYI");
break;
case ExprType::I64:
MOZ_CRASH("unexpected return type when calling from ion to wasm");
@ -1151,6 +1151,7 @@ void wasm::GenerateDirectCallFromJit(MacroAssembler& masm, const FuncExport& fe,
GenPrintF64(DebugChannel::Function, masm, ReturnDoubleReg);
break;
case wasm::ExprType::Ref:
case wasm::ExprType::FuncRef:
case wasm::ExprType::AnyRef:
case wasm::ExprType::I64:
MOZ_CRASH("unexpected return type when calling from ion to wasm");
@ -1552,6 +1553,14 @@ static bool GenerateImportInterpExit(MacroAssembler& masm, const FuncImport& fi,
break;
case ExprType::Ref:
MOZ_CRASH("No Ref support here yet");
case ExprType::FuncRef:
masm.call(SymbolicAddress::CallImport_FuncRef);
masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
masm.loadPtr(argv, ReturnReg);
GenPrintf(DebugChannel::Import, masm, "wasm-import[%u]; returns ",
funcImportIndex);
GenPrintPtr(DebugChannel::Import, masm, ReturnReg);
break;
case ExprType::AnyRef:
masm.call(SymbolicAddress::CallImport_AnyRef);
masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
@ -1753,10 +1762,9 @@ static bool GenerateImportJitExit(MacroAssembler& masm, const FuncImport& fi,
GenPrintF64(DebugChannel::Import, masm, ReturnDoubleReg);
break;
case ExprType::Ref:
MOZ_CRASH("ref returned by import (jit exit) NYI");
break;
case ExprType::FuncRef:
case ExprType::AnyRef:
MOZ_CRASH("anyref returned by import (jit exit) NYI");
MOZ_CRASH("reference returned by import (jit exit) NYI");
break;
case ExprType::NullRef:
MOZ_CRASH("NullRef not expressible");

Просмотреть файл

@ -30,7 +30,7 @@ using namespace js::wasm;
using mozilla::CheckedInt;
Table::Table(JSContext* cx, const TableDesc& desc,
HandleWasmTableObject maybeObject, UniqueAnyFuncArray functions)
HandleWasmTableObject maybeObject, UniqueFuncRefArray functions)
: maybeObject_(maybeObject),
observers_(cx->zone()),
functions_(std::move(functions)),
@ -55,9 +55,9 @@ Table::Table(JSContext* cx, const TableDesc& desc,
SharedTable Table::create(JSContext* cx, const TableDesc& desc,
HandleWasmTableObject maybeObject) {
switch (desc.kind) {
case TableKind::AnyFunction:
case TableKind::TypedFunction: {
UniqueAnyFuncArray functions(
case TableKind::FuncRef:
case TableKind::AsmJS: {
UniqueFuncRefArray functions(
cx->pod_calloc<FunctionTableElem>(desc.limits.initial));
if (!functions) {
return nullptr;
@ -90,7 +90,7 @@ void Table::tracePrivate(JSTracer* trc) {
}
switch (kind_) {
case TableKind::AnyFunction: {
case TableKind::FuncRef: {
for (uint32_t i = 0; i < length_; i++) {
if (functions_[i].tls) {
functions_[i].tls->instance->trace(trc);
@ -104,7 +104,7 @@ void Table::tracePrivate(JSTracer* trc) {
objects_.trace(trc);
break;
}
case TableKind::TypedFunction: {
case TableKind::AsmJS: {
#ifdef DEBUG
for (uint32_t i = 0; i < length_; i++) {
MOZ_ASSERT(!functions_[i].tls);
@ -135,7 +135,7 @@ uint8_t* Table::functionBase() const {
return (uint8_t*)functions_.get();
}
const FunctionTableElem& Table::getAnyFunc(uint32_t index) const {
const FunctionTableElem& Table::getFuncRef(uint32_t index) const {
MOZ_ASSERT(isFunction());
return functions_[index];
}
@ -148,13 +148,7 @@ AnyRef Table::getAnyRef(uint32_t index) const {
return AnyRef::fromJSObject(objects_[index]);
}
const void* Table::getShortlivedAnyRefLocForCompiledCode(uint32_t index) const {
MOZ_ASSERT(!isFunction());
return const_cast<HeapPtr<JSObject*>&>(objects_[index])
.unsafeUnbarrieredForTracing();
}
void Table::setAnyFunc(uint32_t index, void* code, const Instance* instance) {
void Table::setFuncRef(uint32_t index, void* code, const Instance* instance) {
MOZ_ASSERT(isFunction());
FunctionTableElem& elem = functions_[index];
@ -163,13 +157,13 @@ void Table::setAnyFunc(uint32_t index, void* code, const Instance* instance) {
}
switch (kind_) {
case TableKind::AnyFunction:
case TableKind::FuncRef:
elem.code = code;
elem.tls = instance->tlsData();
MOZ_ASSERT(elem.tls->instance->objectUnbarriered()->isTenured(),
"no writeBarrierPost (Table::set)");
break;
case TableKind::TypedFunction:
case TableKind::AsmJS:
elem.code = code;
elem.tls = nullptr;
break;
@ -188,7 +182,7 @@ void Table::setAnyRef(uint32_t index, AnyRef new_obj) {
void Table::setNull(uint32_t index) {
switch (kind_) {
case TableKind::AnyFunction: {
case TableKind::FuncRef: {
FunctionTableElem& elem = functions_[index];
if (elem.tls) {
JSObject::writeBarrierPre(elem.tls->instance->objectUnbarriered());
@ -202,7 +196,7 @@ void Table::setNull(uint32_t index) {
setAnyRef(index, AnyRef::null());
break;
}
case TableKind::TypedFunction: {
case TableKind::AsmJS: {
MOZ_CRASH("Should not happen");
}
}
@ -210,7 +204,7 @@ void Table::setNull(uint32_t index) {
void Table::copy(const Table& srcTable, uint32_t dstIndex, uint32_t srcIndex) {
switch (kind_) {
case TableKind::AnyFunction: {
case TableKind::FuncRef: {
FunctionTableElem& dst = functions_[dstIndex];
if (dst.tls) {
JSObject::writeBarrierPre(dst.tls->instance->objectUnbarriered());
@ -233,7 +227,7 @@ void Table::copy(const Table& srcTable, uint32_t dstIndex, uint32_t srcIndex) {
setAnyRef(dstIndex, srcTable.getAnyRef(srcIndex));
break;
}
case TableKind::TypedFunction: {
case TableKind::AsmJS: {
MOZ_CRASH("Bad table type");
}
}
@ -264,7 +258,7 @@ uint32_t Table::grow(uint32_t delta, JSContext* cx) {
cx->runtime(); // Use JSRuntime's MallocProvider to avoid throwing.
switch (kind_) {
case TableKind::AnyFunction: {
case TableKind::FuncRef: {
// Note that realloc does not release functions_'s pointee on failure
// which is exactly what we need here.
FunctionTableElem* newFunctions = rt->pod_realloc<FunctionTableElem>(
@ -285,7 +279,7 @@ uint32_t Table::grow(uint32_t delta, JSContext* cx) {
}
break;
}
case TableKind::TypedFunction: {
case TableKind::AsmJS: {
MOZ_CRASH("Bad table type");
}
}

Просмотреть файл

@ -29,7 +29,7 @@ namespace wasm {
// stateful objects exposed to WebAssembly. asm.js also uses Tables to represent
// its homogeneous function-pointer tables.
//
// A table of AnyFunction holds FunctionTableElems, which are (instance*,index)
// A table of FuncRef holds FunctionTableElems, which are (instance*,index)
// pairs, where the instance must be traced.
//
// A table of AnyRef holds JSObject pointers, which must be traced.
@ -44,11 +44,11 @@ class Table : public ShareableBase<Table> {
using InstanceSet = JS::WeakCache<GCHashSet<
WeakHeapPtrWasmInstanceObject,
MovableCellHasher<WeakHeapPtrWasmInstanceObject>, SystemAllocPolicy>>;
using UniqueAnyFuncArray = UniquePtr<FunctionTableElem[], JS::FreePolicy>;
using UniqueFuncRefArray = UniquePtr<FunctionTableElem[], JS::FreePolicy>;
WeakHeapPtrWasmTableObject maybeObject_;
InstanceSet observers_;
UniqueAnyFuncArray functions_; // either functions_ has data
UniqueFuncRefArray functions_; // either functions_ has data
TableAnyRefVector objects_; // or objects_, but not both
const TableKind kind_;
uint32_t length_;
@ -57,7 +57,7 @@ class Table : public ShareableBase<Table> {
template <class>
friend struct js::MallocProvider;
Table(JSContext* cx, const TableDesc& td, HandleWasmTableObject maybeObject,
UniqueAnyFuncArray functions);
UniqueFuncRefArray functions);
Table(JSContext* cx, const TableDesc& td, HandleWasmTableObject maybeObject,
TableAnyRefVector&& objects);
@ -70,9 +70,8 @@ class Table : public ShareableBase<Table> {
void trace(JSTracer* trc);
TableKind kind() const { return kind_; }
bool isTypedFunction() const { return kind_ == TableKind::TypedFunction; }
bool isFunction() const {
return kind_ == TableKind::AnyFunction || kind_ == TableKind::TypedFunction;
return kind_ == TableKind::FuncRef || kind_ == TableKind::AsmJS;
}
uint32_t length() const { return length_; }
Maybe<uint32_t> maximum() const { return maximum_; }
@ -80,14 +79,13 @@ class Table : public ShareableBase<Table> {
// Only for function values. Raw pointer to the table.
uint8_t* functionBase() const;
// get/setAnyFunc is allowed only on table-of-funcref.
// get/setFuncRef is allowed only on table-of-funcref.
// get/setAnyRef is allowed only on table-of-anyref.
// setNull is allowed on either.
const FunctionTableElem& getAnyFunc(uint32_t index) const;
void setAnyFunc(uint32_t index, void* code, const Instance* instance);
const FunctionTableElem& getFuncRef(uint32_t index) const;
void setFuncRef(uint32_t index, void* code, const Instance* instance);
AnyRef getAnyRef(uint32_t index) const;
const void* getShortlivedAnyRefLocForCompiledCode(uint32_t index) const;
void setAnyRef(uint32_t index, AnyRef);
void setNull(uint32_t index);

Просмотреть файл

@ -90,7 +90,6 @@ class WasmToken {
Field,
Float,
Func,
FuncRef,
#ifdef ENABLE_WASM_GC
GcFeatureOptIn,
#endif
@ -376,7 +375,6 @@ class WasmToken {
case Field:
case Float:
case Func:
case FuncRef:
#ifdef ENABLE_WASM_GC
case GcFeatureOptIn:
#endif
@ -954,7 +952,7 @@ WasmToken WasmTokenStream::next() {
return WasmToken(WasmToken::Align, begin, cur_);
}
if (consume(u"anyfunc")) {
return WasmToken(WasmToken::FuncRef, begin, cur_);
return WasmToken(WasmToken::ValueType, ValType::FuncRef, begin, cur_);
}
if (consume(u"anyref")) {
return WasmToken(WasmToken::ValueType, ValType::AnyRef, begin, cur_);
@ -1037,7 +1035,7 @@ WasmToken WasmTokenStream::next() {
}
if (consume(u"funcref")) {
return WasmToken(WasmToken::FuncRef, begin, cur_);
return WasmToken(WasmToken::ValueType, ValType::FuncRef, begin, cur_);
}
if (consume(u"func")) {
@ -3973,7 +3971,7 @@ static AstExpr* ParseStructNarrow(WasmParseContext& c, bool inParens) {
return nullptr;
}
if (!inputType.isRefType()) {
if (!inputType.isNarrowType()) {
c.ts.generateError(c.ts.peek(), "struct.narrow requires ref type", c.error);
return nullptr;
}
@ -3983,7 +3981,7 @@ static AstExpr* ParseStructNarrow(WasmParseContext& c, bool inParens) {
return nullptr;
}
if (!outputType.isRefType()) {
if (!outputType.isNarrowType()) {
c.ts.generateError(c.ts.peek(), "struct.narrow requires ref type", c.error);
return nullptr;
}
@ -4719,20 +4717,19 @@ static bool ParseGlobalType(WasmParseContext& c, AstValType* type,
static bool ParseElemType(WasmParseContext& c, TableKind* tableKind) {
WasmToken token;
if (c.ts.getIf(WasmToken::FuncRef, &token)) {
*tableKind = TableKind::AnyFunction;
return true;
}
if (c.ts.getIf(WasmToken::ValueType, &token)) {
if (token.valueType() == ValType::FuncRef) {
*tableKind = TableKind::FuncRef;
return true;
}
#ifdef ENABLE_WASM_REFTYPES
if (c.ts.getIf(WasmToken::ValueType, &token) &&
token.valueType() == ValType::AnyRef) {
*tableKind = TableKind::AnyRef;
return true;
if (token.valueType() == ValType::AnyRef) {
*tableKind = TableKind::AnyRef;
return true;
}
#endif
}
c.ts.generateError(token, "'funcref' or 'anyref' required", c.error);
#else
c.ts.generateError(token, "'funcref' required", c.error);
#endif
return false;
}
@ -6858,8 +6855,8 @@ static bool EncodeLimits(Encoder& e, const Limits& limits) {
static bool EncodeTableLimits(Encoder& e, const Limits& limits,
TableKind tableKind) {
switch (tableKind) {
case TableKind::AnyFunction:
if (!e.writeVarU32(uint32_t(TypeCode::AnyFunc))) {
case TableKind::FuncRef:
if (!e.writeVarU32(uint32_t(TypeCode::FuncRef))) {
return false;
}
break;
@ -7280,7 +7277,7 @@ static bool EncodeElemSegment(Encoder& e, AstElemSegment& segment) {
}
if (segment.isPassive()) {
if (!e.writeFixedU8(uint8_t(TypeCode::AnyFunc))) {
if (!e.writeFixedU8(uint8_t(TypeCode::FuncRef))) {
return false;
}
}

Просмотреть файл

@ -78,10 +78,9 @@ Val::Val(const LitVal& val) {
u.f64_ = val.f64();
return;
case ValType::Ref:
u.ref_ = val.ref();
return;
case ValType::FuncRef:
case ValType::AnyRef:
u.anyref_ = val.anyref();
u.ref_ = val.ref();
return;
case ValType::NullRef:
break;
@ -90,16 +89,12 @@ Val::Val(const LitVal& val) {
}
void Val::trace(JSTracer* trc) {
if (type_.isValid()) {
if (type_.isRef() && u.ref_) {
TraceManuallyBarrieredEdge(trc, &u.ref_, "wasm ref/anyref global");
} else if (type_ == ValType::AnyRef && !u.anyref_.isNull()) {
// TODO/AnyRef-boxing: With boxed immediates and strings, the write
// barrier is going to have to be more complicated.
ASSERT_ANYREF_IS_JSOBJECT;
TraceManuallyBarrieredEdge(trc, u.anyref_.asJSObjectAddress(),
"wasm ref/anyref global");
}
if (type_.isValid() && type_.isReference() && !u.ref_.isNull()) {
// TODO/AnyRef-boxing: With boxed immediates and strings, the write
// barrier is going to have to be more complicated.
ASSERT_ANYREF_IS_JSOBJECT;
TraceManuallyBarrieredEdge(trc, u.ref_.asJSObjectAddress(),
"wasm reference-typed global");
}
}
@ -273,6 +268,7 @@ static bool IsImmediateType(ValType vt) {
case ValType::I64:
case ValType::F32:
case ValType::F64:
case ValType::FuncRef:
case ValType::AnyRef:
return true;
case ValType::NullRef:
@ -293,8 +289,10 @@ static unsigned EncodeImmediateType(ValType vt) {
return 2;
case ValType::F64:
return 3;
case ValType::AnyRef:
case ValType::FuncRef:
return 4;
case ValType::AnyRef:
return 5;
case ValType::NullRef:
case ValType::Ref:
break;
@ -724,6 +722,7 @@ void DebugFrame::updateReturnJSValue() {
case ExprType::Ref:
cachedReturnJSValue_ = ObjectOrNullValue((JSObject*)resultRef_);
break;
case ExprType::FuncRef:
case ExprType::AnyRef:
cachedReturnJSValue_ = UnboxAnyRef(resultAnyRef_);
break;

Просмотреть файл

@ -255,6 +255,12 @@ static inline uint32_t UnpackTypeCodeIndex(PackedTypeCode ptc) {
return uint32_t(ptc) >> 8;
}
static inline bool IsReferenceType(PackedTypeCode ptc) {
TypeCode tc = UnpackTypeCodeType(ptc);
return tc == TypeCode::Ref || tc == TypeCode::AnyRef ||
tc == TypeCode::FuncRef || tc == TypeCode::NullRef;
}
// The ExprType represents the type of a WebAssembly expression or return value
// and may either be a ValType or void.
//
@ -275,6 +281,7 @@ class ExprType {
case TypeCode::F32:
case TypeCode::F64:
case TypeCode::AnyRef:
case TypeCode::FuncRef:
case TypeCode::NullRef:
case TypeCode::Ref:
case TypeCode::BlockVoid:
@ -295,6 +302,7 @@ class ExprType {
F32 = uint8_t(TypeCode::F32),
F64 = uint8_t(TypeCode::F64),
AnyRef = uint8_t(TypeCode::AnyRef),
FuncRef = uint8_t(TypeCode::FuncRef),
NullRef = uint8_t(TypeCode::NullRef),
Ref = uint8_t(TypeCode::Ref),
@ -321,32 +329,23 @@ class ExprType {
explicit inline ExprType(const ValType& t);
PackedTypeCode packed() const { return tc_; }
PackedTypeCode* packedPtr() { return &tc_; }
Code code() const { return Code(UnpackTypeCodeType(tc_)); }
uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); }
bool isValid() const { return IsValid(tc_); }
uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); }
bool isRef() const { return UnpackTypeCodeType(tc_) == TypeCode::Ref; }
bool isReference() const {
TypeCode tc = UnpackTypeCodeType(tc_);
return tc == TypeCode::Ref || tc == TypeCode::AnyRef ||
tc == TypeCode::NullRef;
}
bool isReference() const { return IsReferenceType(tc_); }
bool operator==(const ExprType& that) const { return tc_ == that.tc_; }
bool operator!=(const ExprType& that) const { return tc_ != that.tc_; }
bool operator==(Code that) const {
MOZ_ASSERT(that != Code::Ref);
return code() == that;
}
bool operator!=(Code that) const { return !(*this == that); }
};
@ -364,6 +363,7 @@ class ValType {
case TypeCode::F32:
case TypeCode::F64:
case TypeCode::AnyRef:
case TypeCode::FuncRef:
case TypeCode::NullRef:
case TypeCode::Ref:
return true;
@ -381,6 +381,7 @@ class ValType {
F64 = uint8_t(TypeCode::F64),
AnyRef = uint8_t(TypeCode::AnyRef),
FuncRef = uint8_t(TypeCode::FuncRef),
NullRef = uint8_t(TypeCode::NullRef),
Ref = uint8_t(TypeCode::Ref),
};
@ -431,27 +432,19 @@ class ValType {
Code code() const { return Code(UnpackTypeCodeType(tc_)); }
uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); }
bool isValid() const { return IsValid(tc_); }
uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); }
bool isRef() const { return UnpackTypeCodeType(tc_) == TypeCode::Ref; }
bool isReference() const {
TypeCode tc = UnpackTypeCodeType(tc_);
return tc == TypeCode::Ref || tc == TypeCode::AnyRef ||
tc == TypeCode::NullRef;
}
bool isReference() const { return IsReferenceType(tc_); }
bool operator==(const ValType& that) const { return tc_ == that.tc_; }
bool operator!=(const ValType& that) const { return tc_ != that.tc_; }
bool operator==(Code that) const {
MOZ_ASSERT(that != Code::Ref);
return code() == that;
}
bool operator!=(Code that) const { return !(*this == that); }
};
@ -471,6 +464,7 @@ static inline unsigned SizeOf(ValType vt) {
case ValType::F64:
return 8;
case ValType::AnyRef:
case ValType::FuncRef:
case ValType::NullRef:
case ValType::Ref:
return sizeof(intptr_t);
@ -490,6 +484,7 @@ static inline jit::MIRType ToMIRType(ValType vt) {
return jit::MIRType::Double;
case ValType::Ref:
case ValType::AnyRef:
case ValType::FuncRef:
case ValType::NullRef:
return jit::MIRType::RefOrNull;
}
@ -527,6 +522,8 @@ static inline const char* ToCString(ExprType type) {
return "f64";
case ExprType::AnyRef:
return "anyref";
case ExprType::FuncRef:
return "funcref";
case ExprType::NullRef:
return "nullref";
case ExprType::Ref:
@ -571,11 +568,16 @@ static inline const char* ToCString(ValType type) {
class AnyRef {
JSObject* value_;
explicit AnyRef() : value_((JSObject*)-1) {}
explicit AnyRef(JSObject* p) : value_(p) {
MOZ_ASSERT(((uintptr_t)p & 0x03) == 0);
}
public:
// An invalid AnyRef cannot arise naturally from wasm and so can be used as
// a sentinel value to indicate failure from an AnyRef-returning function.
static AnyRef invalid() { return AnyRef(); }
// Given a void* that comes from compiled wasm code, turn it into AnyRef.
static AnyRef fromCompiledCode(void* p) { return AnyRef((JSObject*)p); }
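A simplified, standalone mirror of the sentinel pattern above (a sketch, not the real AnyRef class; AnyRefSketch and its members are invented names, and the all-ones bit pattern plus 4-byte alignment are assumptions taken from the lines shown here):

#include <assert.h>
#include <stdint.h>

class AnyRefSketch {
  void* value_;
  explicit AnyRefSketch(void* p) : value_(p) {}

 public:
  // Sentinel that cannot arise from wasm, used to signal "error already
  // reported" from an AnyRef-returning helper.
  static AnyRefSketch invalid() {
    return AnyRefSketch(reinterpret_cast<void*>(uintptr_t(-1)));
  }
  // Values coming from compiled code are at least 4-byte aligned (the low
  // bits are reserved for future boxing), so the sentinel never collides.
  static AnyRefSketch fromPointer(void* p) {
    assert((reinterpret_cast<uintptr_t>(p) & 0x03) == 0);
    return AnyRefSketch(p);
  }
  bool isInvalid() const {
    return value_ == reinterpret_cast<void*>(uintptr_t(-1));
  }
};

// Typical caller shape: check the sentinel before trusting the value.
//   AnyRefSketch r = SomeFallibleHelper();
//   if (r.isInvalid()) { /* callee already reported; trap or propagate */ }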
@ -702,8 +704,7 @@ class LitVal {
uint64_t i64_;
float f32_;
double f64_;
JSObject* ref_; // Note, this breaks an abstraction boundary
AnyRef anyref_;
AnyRef ref_;
} u;
public:
@ -715,17 +716,11 @@ class LitVal {
explicit LitVal(float f32) : type_(ValType::F32) { u.f32_ = f32; }
explicit LitVal(double f64) : type_(ValType::F64) { u.f64_ = f64; }
explicit LitVal(AnyRef any) : type_(ValType::AnyRef) {
explicit LitVal(ValType type, AnyRef any) : type_(type) {
MOZ_ASSERT(type.isReference());
MOZ_ASSERT(any.isNull(),
"use Val for non-nullptr ref types to get tracing");
u.anyref_ = any;
}
explicit LitVal(ValType refType, JSObject* ref) : type_(refType) {
MOZ_ASSERT(refType.isRef());
MOZ_ASSERT(ref == nullptr,
"use Val for non-nullptr ref types to get tracing");
u.ref_ = ref;
u.ref_ = any;
}
ValType type() const { return type_; }
@ -747,14 +742,10 @@ class LitVal {
MOZ_ASSERT(type_ == ValType::F64);
return u.f64_;
}
JSObject* ref() const {
MOZ_ASSERT(type_.isRef());
AnyRef ref() const {
MOZ_ASSERT(type_.isReference());
return u.ref_;
}
AnyRef anyref() const {
MOZ_ASSERT(type_ == ValType::AnyRef);
return u.anyref_;
}
};
// A Val is a LitVal that can contain (non-null) pointers to GC things. All Vals
@ -770,9 +761,9 @@ class MOZ_NON_PARAM Val : public LitVal {
explicit Val(uint64_t i64) : LitVal(i64) {}
explicit Val(float f32) : LitVal(f32) {}
explicit Val(double f64) : LitVal(f64) {}
explicit Val(AnyRef val) : LitVal(AnyRef::null()) { u.anyref_ = val; }
explicit Val(ValType type, JSObject* obj) : LitVal(type, (JSObject*)nullptr) {
u.ref_ = obj;
explicit Val(ValType type, AnyRef val) : LitVal(type, AnyRef::null()) {
MOZ_ASSERT(type.isReference());
u.ref_ = val;
}
void trace(JSTracer* trc);
};
@ -1863,6 +1854,7 @@ enum class SymbolicAddress {
CallImport_I32,
CallImport_I64,
CallImport_F64,
CallImport_FuncRef,
CallImport_AnyRef,
CoerceInPlace_ToInt32,
CoerceInPlace_ToNumber,
@ -1913,6 +1905,18 @@ enum class SymbolicAddress {
Limit
};
// The FailureMode indicates whether, immediately after a call to a builtin
// returns, the return value should be checked against an error condition
// (and if so, which one) which signals that the C++ callee has already
// reported an error and thus wasm needs to wasmTrap(Trap::ThrowReported).
enum class FailureMode : uint8_t {
Infallible,
FailOnNegI32,
FailOnNullPtr,
FailOnInvalidRef
};
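A minimal standalone sketch of how a caller could act on these modes — not the actual MacroAssembler code; FailureModeSketch and SignalsReportedError are invented names, and the exact bit patterns (in particular the invalid-ref sentinel) are assumptions:

#include <stdint.h>

enum class FailureModeSketch : uint8_t {
  Infallible,
  FailOnNegI32,
  FailOnNullPtr,
  FailOnInvalidRef
};

// True when the raw return value signals that the callee already reported an
// error, so the generated code should wasmTrap(Trap::ThrowReported).
static bool SignalsReportedError(FailureModeSketch mode, intptr_t rawResult) {
  switch (mode) {
    case FailureModeSketch::Infallible:
      return false;                      // no failure convention at all
    case FailureModeSketch::FailOnNegI32:
      return int32_t(rawResult) < 0;     // negative i32 result means failure
    case FailureModeSketch::FailOnNullPtr:
      return rawResult == 0;             // null pointer result means failure
    case FailureModeSketch::FailOnInvalidRef:
      return rawResult == intptr_t(-1);  // assumed sentinel, cf. AnyRef::invalid()
  }
  return false;
}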
// SymbolicAddressSignature carries type information for a function referred
// to by a SymbolicAddress. In order that |argTypes| can be written out as a
// static initialiser, it has to have fixed length. At present
@ -1928,6 +1932,8 @@ struct SymbolicAddressSignature {
const SymbolicAddress identity;
// The return type, or MIRType::None to denote 'void'.
const jit::MIRType retType;
// The failure mode, which is checked by masm.wasmCallBuiltinInstanceMethod.
const FailureMode failureMode;
// The number of arguments, 0 .. SymbolicAddressSignatureMaxArgs only.
const uint8_t numArgs;
// The argument types; SymbolicAddressSignatureMaxArgs + 1 guard, which
@ -1966,10 +1972,13 @@ struct Limits {
};
// TableDesc describes a table as well as the offset of the table's base pointer
// in global memory. Currently, wasm only has "any function" and asm.js only
// "typed function".
// in global memory. The TableKind determines the representation:
// - AnyRef: a wasm anyref word (wasm::AnyRef)
// - FuncRef: a two-word FunctionTableElem (wasm indirect call ABI)
// - AsmJS: a two-word FunctionTableElem (asm.js ABI)
// Eventually there should be a single unified AnyRef representation.
enum class TableKind { AnyFunction, AnyRef, TypedFunction };
enum class TableKind { AnyRef, FuncRef, AsmJS };
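To make the three representations above concrete, here is a standalone sketch of the element layouts (assumed layout with invented *Sketch names, not the real headers): AnyRef tables hold one anyref word per element, while FuncRef and AsmJS tables hold a two-word FunctionTableElem.

// Two-word entry used by FuncRef (wasm indirect-call ABI) and AsmJS tables.
struct FunctionTableElemSketch {
  void* code;  // code pointer to call
  void* tls;   // instance TLS pointer; null for asm.js entries
};

// One-word entry used by AnyRef tables (a wasm::AnyRef word).
using AnyRefElemSketch = void*;

static_assert(sizeof(FunctionTableElemSketch) == 2 * sizeof(void*),
              "FuncRef/AsmJS table entries are two words");
static_assert(sizeof(AnyRefElemSketch) == sizeof(void*),
              "AnyRef table entries are one word");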
struct TableDesc {
TableKind kind;
@ -2106,8 +2115,8 @@ struct TableTls {
void* functionBase;
};
// Table elements for TableKind::AnyFunctions carry both the code pointer and an
// instance pointer.
// A table element for TableKind::FuncRef carries both the code pointer and
// an instance pointer.
struct FunctionTableElem {
// The code to call when calling this element. The table ABI is the system

Просмотреть файл

@ -1316,6 +1316,7 @@ static bool DecodeStructType(Decoder& d, ModuleEnvironment* env,
case ValType::Ref:
offset = layout.addReference(ReferenceType::TYPE_OBJECT);
break;
case ValType::FuncRef:
case ValType::AnyRef:
offset = layout.addReference(ReferenceType::TYPE_WASM_ANYREF);
break;
@ -1562,8 +1563,8 @@ static bool DecodeTableTypeAndLimits(Decoder& d, bool gcTypesEnabled,
}
TableKind tableKind;
if (elementType == uint8_t(TypeCode::AnyFunc)) {
tableKind = TableKind::AnyFunction;
if (elementType == uint8_t(TypeCode::FuncRef)) {
tableKind = TableKind::FuncRef;
#ifdef ENABLE_WASM_REFTYPES
} else if (elementType == uint8_t(TypeCode::AnyRef)) {
tableKind = TableKind::AnyRef;
@ -1602,6 +1603,7 @@ static bool GlobalIsJSCompatible(Decoder& d, ValType type, bool isMutable) {
case ValType::F32:
case ValType::F64:
case ValType::I64:
case ValType::FuncRef:
case ValType::AnyRef:
break;
#ifdef WASM_PRIVATE_REFTYPES
@ -1937,14 +1939,8 @@ static bool DecodeInitializerExpression(Decoder& d, ModuleEnvironment* env,
return d.fail(
"type mismatch: initializer type and expected type don't match");
}
if (expected == ValType::AnyRef) {
*init = InitExpr(LitVal(AnyRef::null()));
} else {
if (!env->gcTypesEnabled()) {
return d.fail("unexpected initializer expression");
}
*init = InitExpr(LitVal(expected, nullptr));
}
MOZ_ASSERT_IF(expected.isRef(), env->gcTypesEnabled());
*init = InitExpr(LitVal(expected, AnyRef::null()));
break;
}
case uint16_t(Op::GetGlobal): {
@ -2271,7 +2267,7 @@ static bool DecodeElemSection(Decoder& d, ModuleEnvironment* env) {
// segments, there really is no segment index, and we should never
// touch the field.
tableIndex = (uint32_t)-1;
} else if (env->tables[tableIndex].kind != TableKind::AnyFunction) {
} else if (env->tables[tableIndex].kind != TableKind::FuncRef) {
return d.fail("only tables of 'funcref' may have element segments");
}
@ -2292,7 +2288,7 @@ static bool DecodeElemSection(Decoder& d, ModuleEnvironment* env) {
if (!d.readFixedU8(&form)) {
return d.fail("expected type form");
}
if (form != uint8_t(TypeCode::AnyFunc)) {
if (form != uint8_t(TypeCode::FuncRef)) {
return d.fail(
"passive segments can only contain function references");
}

Просмотреть файл

@ -611,6 +611,7 @@ class Decoder {
*type = ValType::Code(code);
return true;
#ifdef ENABLE_WASM_REFTYPES
case uint8_t(ValType::FuncRef):
case uint8_t(ValType::AnyRef):
*type = ValType::Code(code);
return true;

Просмотреть файл

@ -130,7 +130,7 @@ this.browserAction = class extends ExtensionAPI {
browserActionMap.set(extension, browserAction);
}
onShutdown(reason) {
onShutdown() {
let {extension} = this;
if (browserActionMap.has(extension)) {

Просмотреть файл

@ -215,7 +215,7 @@ this.pageAction = class extends ExtensionAPI {
pageActionMap.set(extension, pageAction);
}
onShutdown(reason) {
onShutdown() {
let {extension} = this;
if (pageActionMap.has(extension)) {

Просмотреть файл

@ -2033,7 +2033,6 @@ class Extension extends ExtensionData {
async shutdown(reason) {
this.state = "Shutdown";
this.shutdownReason = reason;
this.hasShutdown = true;
if (!this.policy) {
@ -2061,7 +2060,8 @@ class Extension extends ExtensionData {
this.state = "Shutdown: Flushed jar cache";
}
if (this.cleanupFile || reason !== "APP_SHUTDOWN") {
const isAppShutdown = reason === "APP_SHUTDOWN";
if (this.cleanupFile || !isAppShutdown) {
StartupCache.clearAddonData(this.id);
}
@ -2074,7 +2074,7 @@ class Extension extends ExtensionData {
Services.ppmm.removeMessageListener(this.MESSAGE_EMIT_EVENT, this);
this.updatePermissions(this.shutdownReason);
this.updatePermissions(reason);
if (!this.manifest) {
this.state = "Shutdown: Complete: No manifest";
@ -2089,10 +2089,10 @@ class Extension extends ExtensionData {
obj.close();
}
ParentAPIManager.shutdownExtension(this.id);
ParentAPIManager.shutdownExtension(this.id, reason);
Management.emit("shutdown", this);
this.emit("shutdown");
this.emit("shutdown", isAppShutdown);
const TIMED_OUT = Symbol();

Просмотреть файл

@ -334,9 +334,9 @@ class ExtensionAPI extends EventEmitter {
this.extension = extension;
extension.once("shutdown", () => {
extension.once("shutdown", (what, isAppShutdown) => {
if (this.onShutdown) {
this.onShutdown(extension.shutdownReason);
this.onShutdown(isAppShutdown);
}
this.extension = null;
});

Просмотреть файл

@ -19,6 +19,7 @@ const {Services} = ChromeUtils.import("resource://gre/modules/Services.jsm");
const {XPCOMUtils} = ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
XPCOMUtils.defineLazyModuleGetters(this, {
AddonManager: "resource://gre/modules/AddonManager.jsm",
AppConstants: "resource://gre/modules/AppConstants.jsm",
AsyncShutdown: "resource://gre/modules/AsyncShutdown.jsm",
DeferredTask: "resource://gre/modules/DeferredTask.jsm",
@ -112,6 +113,17 @@ let apiManager = new class extends SchemaAPIManager {
}));
});
/* eslint-enable mozilla/balanced-listeners */
// Handle any changes that happened during startup
let disabledIds = AddonManager.getStartupChanges(AddonManager.STARTUP_CHANGE_DISABLED);
if (disabledIds.length > 0) {
this._callHandlers(disabledIds, "disable", "onDisable");
}
let uninstalledIds = AddonManager.getStartupChanges(AddonManager.STARTUP_CHANGE_UNINSTALLED);
if (uninstalledIds.length > 0) {
this._callHandlers(uninstalledIds, "uninstall", "onUninstall");
}
}
getModuleJSONURLs() {
@ -183,6 +195,23 @@ let apiManager = new class extends SchemaAPIManager {
}
}
}
// Call static handlers for the given event on the given extension ids,
// and set up a shutdown blocker to ensure they all complete.
_callHandlers(ids, event, method) {
let promises = Array.from(this.eventModules.get(event))
  .map(async modName => {
    let module = await this.asyncLoadModule(modName);
    return Promise.all(ids.map(id => module[method](id)));
  });
if (event === "disable") {
promises.push(...ids.map(id => this.emit("disable", id)));
}
AsyncShutdown.profileBeforeChange.addBlocker(
`Extension API ${event} handlers for ${ids.join(",")}`,
Promise.all(promises));
}
}();
// A proxy for extension ports between two DISTINCT message managers.
@ -825,17 +854,13 @@ ParentAPIManager = {
}
},
shutdownExtension(extensionId) {
shutdownExtension(extensionId, reason) {
if (["ADDON_DISABLE", "ADDON_UNINSTALL"].includes(reason)) {
apiManager._callHandlers([extensionId], "disable", "onDisable");
}
for (let [childId, context] of this.proxyContexts) {
if (context.extension.id == extensionId) {
if (["ADDON_DISABLE", "ADDON_UNINSTALL"].includes(context.extension.shutdownReason)) {
let modules = apiManager.eventModules.get("disable");
Array.from(modules).map(async apiName => {
let module = await apiManager.asyncLoadModule(apiName);
module.onDisable(extensionId);
});
}
context.shutdown();
this.proxyContexts.delete(childId);
}

Просмотреть файл

@ -40,10 +40,8 @@ Management.on("uninstall", (type, {id}) => {
ExtensionPreferencesManager.removeAll(id);
});
Management.on("shutdown", (type, extension) => {
if (extension.shutdownReason == "ADDON_DISABLE") {
this.ExtensionPreferencesManager.disableAll(extension.id);
}
Management.on("disable", (type, id) => {
this.ExtensionPreferencesManager.disableAll(id);
});
Management.on("startup", async (type, extension) => {

Просмотреть файл

@ -50,11 +50,11 @@ this.protocolHandlers = class extends ExtensionAPI {
}
}
onShutdown(shutdownReason) {
onShutdown(isAppShutdown) {
let {extension} = this;
let {manifest} = extension;
if (shutdownReason === "APP_SHUTDOWN") {
if (isAppShutdown) {
return;
}

Просмотреть файл

@ -383,8 +383,8 @@ this.theme = class extends ExtensionAPI {
});
}
onShutdown(reason) {
if (reason === "APP_SHUTDOWN") {
onShutdown(isAppShutdown) {
if (isAppShutdown) {
return;
}

Просмотреть файл

@ -2319,6 +2319,9 @@ var XPIProvider = {
if (AddonManager.getStartupChanges(AddonManager.STARTUP_CHANGE_INSTALLED)
.includes(addon.id))
reason = BOOTSTRAP_REASONS.ADDON_INSTALL;
else if (AddonManager.getStartupChanges(AddonManager.STARTUP_CHANGE_ENABLED)
.includes(addon.id))
reason = BOOTSTRAP_REASONS.ADDON_ENABLE;
BootstrapScope.get(addon).startup(reason);
} catch (e) {
logger.error("Failed to load bootstrap addon " + addon.id + " from " +

Просмотреть файл

@ -0,0 +1,43 @@
createAppInfo("xpcshell@tessts.mozilla.org", "XPCShell", "1", "1");
BootstrapMonitor.init();
// Test that enabling an extension during startup generates the
// proper reason for startup().
add_task(async function test_startup_enable() {
const ID = "compat@tests.mozilla.org";
await promiseStartupManager();
await promiseInstallWebExtension({
manifest: {
applications: {
gecko: {
id: ID,
strict_min_version: "1",
strict_max_version: "1",
},
},
},
});
BootstrapMonitor.checkInstalled(ID);
BootstrapMonitor.checkStarted(ID);
let {reason} = BootstrapMonitor.started.get(ID);
equal(reason, BOOTSTRAP_REASONS.ADDON_INSTALL,
"Startup reason is ADDON_INSTALL at install");
gAppInfo.platformVersion = "2";
await promiseRestartManager("2");
BootstrapMonitor.checkInstalled(ID);
BootstrapMonitor.checkNotStarted(ID);
gAppInfo.platformVersion = "1";
await promiseRestartManager("1");
BootstrapMonitor.checkInstalled(ID);
BootstrapMonitor.checkStarted(ID);
({reason} = BootstrapMonitor.started.get(ID));
equal(reason, BOOTSTRAP_REASONS.ADDON_ENABLE,
"Startup reason is ADDON_ENABLE when re-enabled at startup");
});

Просмотреть файл

@ -103,6 +103,7 @@ skip-if = require_signing || !allow_legacy_extensions
[test_startup.js]
# Bug 676992: test consistently fails on Android
fail-if = os == "android"
[test_startup_enable.js]
[test_strictcompatibility.js]
head = head_addons.js head_compat.js
[test_syncGUID.js]