Mirror of https://github.com/mozilla/gecko-dev.git
Merge mozilla-central to mozilla-inbound. a=merge
Commit d71fce60ce

.cron.yml | 13
@@ -145,3 +145,16 @@ jobs:
              mozilla-esr60:
                  - {weekday: 'Monday', hour: 10, minute: 0}
                  - {weekday: 'Thursday', hour: 10, minute: 0}

    - name: pipfile-update
      job:
          type: decision-task
          treeherder-symbol: Nfile
          target-tasks-method: pipfile_update
      run-on-projects:
          - mozilla-central
      when:
          by-project:
              # No default branch
              mozilla-central:
                  - {weekday: 'Monday', hour: 10, minute: 0}
.flake8 | 1

@@ -21,3 +21,4 @@ exclude =
    security/nss/,
    testing/mochitest/pywebsocket,
    tools/lint/test/files,
    build/build-infer/build-infer.py,
@@ -34,11 +34,9 @@ var gTabsPanel = {

this.initElements();

let hiddenTabsMenuButton = document.getElementById("allTabsMenu-hiddenTabsButton");
let hiddenTabsSeparator = document.getElementById("allTabsMenu-hiddenTabsSeparator");
this.hiddenAudioTabsPopup = new TabsPanel({
view: this.allTabsView,
insertBefore: hiddenTabsSeparator,
insertBefore: document.getElementById("allTabsMenu-tabsSeparator"),
filterFn: (tab) => tab.hidden && tab.soundPlaying,
});
this.allTabsPanel = new TabsPanel({
@@ -47,8 +45,6 @@ var gTabsPanel = {
filterFn: (tab) => !tab.pinned && !tab.hidden,
});

let containerTabsButton = document.getElementById("allTabsMenu-containerTabsButton");
let containerTabsSeparator = document.getElementById("allTabsMenu-containerTabsSeparator");
this.allTabsView.addEventListener("ViewShowing", (e) => {
PanelUI._ensureShortcutsShown(this.allTabsView);
e.target.querySelector(".undo-close-tab").disabled =
@@ -56,12 +52,16 @@ var gTabsPanel = {

let containersEnabled = Services.prefs.getBoolPref("privacy.userContext.enabled")
&& !PrivateBrowsingUtils.isWindowPrivate(window);
containerTabsButton.hidden = !containersEnabled;
containerTabsSeparator.hidden = !containersEnabled;
document.getElementById("allTabsMenu-containerTabsButton")
.hidden = !containersEnabled;
document.getElementById("allTabsMenu-containerTabsSeparator")
.hidden = !containersEnabled;

let hasHiddenTabs = gBrowser.visibleTabs.length < gBrowser.tabs.length;
hiddenTabsMenuButton.hidden = !hasHiddenTabs;
hiddenTabsSeparator.hidden = !hasHiddenTabs;
document.getElementById("allTabsMenu-hiddenTabsButton")
.hidden = !hasHiddenTabs;
document.getElementById("allTabsMenu-hiddenTabsSeparator")
.hidden = !hasHiddenTabs;
});

this.allTabsView.addEventListener("ViewShown", (e) => {
@@ -46,17 +46,14 @@ const ContentThemeController = {
*/
init() {
addEventListener("LightweightTheme:Set", this);

const event = new CustomEvent("LightweightTheme:Support", {bubbles: true});
document.dispatchEvent(event);
},

/**
* Handle theme updates from the frame script.
* @param {Object} event object containing the theme update.
*/
handleEvent({ detail }) {
if (detail.type == "LightweightTheme:Update") {
handleEvent({ type, detail }) {
if (type == "LightweightTheme:Set") {
let {data} = detail;
if (!data) {
data = {};
@@ -24,8 +24,6 @@ ChromeUtils.defineModuleGetter(this, "ReaderMode",
"resource://gre/modules/ReaderMode.jsm");
ChromeUtils.defineModuleGetter(this, "PageStyleHandler",
"resource:///modules/PageStyleHandler.jsm");
ChromeUtils.defineModuleGetter(this, "LightweightThemeChildListener",
"resource:///modules/LightweightThemeChildListener.jsm");

// TabChildGlobal
var global = this;
@@ -89,33 +87,21 @@ addMessageListener("MixedContent:ReenableProtection", function() {
docShell.mixedContentChannel = null;
});

var LightweightThemeChildListenerStub = {
_childListener: null,
get childListener() {
if (!this._childListener) {
this._childListener = new LightweightThemeChildListener();
}
return this._childListener;
},
XPCOMUtils.defineLazyProxy(this, "LightweightThemeChildHelper",
"resource:///modules/LightweightThemeChildHelper.jsm");

init() {
addEventListener("LightweightTheme:Support", this, false, true);
addMessageListener("LightweightTheme:Update", this);
sendAsyncMessage("LightweightTheme:Request");
this.init = null;
},

handleEvent(event) {
return this.childListener.handleEvent(event);
},

receiveMessage(msg) {
return this.childListener.receiveMessage(msg);
},
};

LightweightThemeChildListenerStub.init();
let themeablePagesWhitelist = new Set([
"about:home",
"about:newtab",
"about:welcome",
]);

addEventListener("pageshow", function({ originalTarget }) {
if (originalTarget.defaultView == content && themeablePagesWhitelist.has(content.document.documentURI)) {
LightweightThemeChildHelper.listen(themeablePagesWhitelist);
LightweightThemeChildHelper.update(chromeOuterWindowID, content);
}
}, false, true);

var AboutReaderListener = {
@@ -20,7 +20,6 @@ const kDumpAllStacks = false;
const whitelist = {
components: new Set([
"ContentProcessSingleton.js",
"EnterprisePoliciesContent.js", // bug 1470324
"extension-process-script.js",
]),
modules: new Set([
@@ -56,7 +55,6 @@ const whitelist = {
"resource:///modules/ContentLinkHandler.jsm",
"resource:///modules/ContentMetaHandler.jsm",
"resource:///modules/PageStyleHandler.jsm",
"resource:///modules/LightweightThemeChildListener.jsm",
"resource://gre/modules/BrowserUtils.jsm",
"resource://gre/modules/E10SUtils.jsm",
"resource://gre/modules/PrivateBrowsingUtils.jsm",
@@ -43,22 +43,6 @@ XPCOMUtils.defineLazyGetter(this, "log", () => {
});
});

// ==== Start XPCOM Boilerplate ==== \\

// Factory object
const EnterprisePoliciesFactory = {
_instance: null,
createInstance: function BGSF_createInstance(outer, iid) {
if (outer != null)
throw Cr.NS_ERROR_NO_AGGREGATION;
return this._instance == null ?
this._instance = new EnterprisePoliciesManager() : this._instance;
}
};

// ==== End XPCOM Boilerplate ==== //

// Constructor
function EnterprisePoliciesManager() {
Services.obs.addObserver(this, "profile-after-change", true);
Services.obs.addObserver(this, "final-ui-startup", true);
@@ -67,14 +51,11 @@ function EnterprisePoliciesManager() {
}

EnterprisePoliciesManager.prototype = {
// for XPCOM
classID: Components.ID("{ea4e1414-779b-458b-9d1f-d18e8efbc145}"),
classID: Components.ID("{ea4e1414-779b-458b-9d1f-d18e8efbc145}"),
QueryInterface: ChromeUtils.generateQI([Ci.nsIObserver,
Ci.nsISupportsWeakReference,
Ci.nsIEnterprisePolicies]),

// redefine the default factory for XPCOMUtils
_xpcom_factory: EnterprisePoliciesFactory,
_xpcom_factory: XPCOMUtils.generateSingletonFactory(EnterprisePoliciesManager),

_initialize() {
let provider = this._chooseProvider();
@@ -195,16 +176,16 @@ EnterprisePoliciesManager.prototype = {

DisallowedFeatures = {};

Services.ppmm.sharedData.delete("EnterprisePolicies:Status");
Services.ppmm.sharedData.delete("EnterprisePolicies:DisallowedFeatures");

this._status = Ci.nsIEnterprisePolicies.UNINITIALIZED;
for (let timing of Object.keys(this._callbacks)) {
this._callbacks[timing] = [];
}
delete Services.ppmm.initialProcessData.policies;
Services.ppmm.broadcastAsyncMessage("EnterprisePolicies:Restart", null);

let { PromiseUtils } = ChromeUtils.import("resource://gre/modules/PromiseUtils.jsm",
{});

// Simulate the startup process. This step-by-step is a bit ugly but it
// tries to emulate the same behavior as of a normal startup.
@@ -259,21 +240,13 @@ EnterprisePoliciesManager.prototype = {
},

disallowFeature(feature, neededOnContentProcess = false) {
DisallowedFeatures[feature] = true;
DisallowedFeatures[feature] = neededOnContentProcess;

// NOTE: For optimization purposes, only features marked as needed
// on content process will be passed onto the child processes.
if (neededOnContentProcess) {
Services.ppmm.initialProcessData.policies
.disallowedFeatures.push(feature);

if (Services.ppmm.childCount > 1) {
// If there has been a content process already initialized, let's
// broadcast the newly disallowed feature.
Services.ppmm.broadcastAsyncMessage(
"EnterprisePolicies:DisallowFeature", {feature}
);
}
Services.ppmm.sharedData.set("EnterprisePolicies:DisallowedFeatures",
new Set(Object.keys(DisallowedFeatures).filter(key => DisallowedFeatures[key])));
}
},
@@ -286,10 +259,7 @@ EnterprisePoliciesManager.prototype = {
set status(val) {
this._status = val;
if (val != Ci.nsIEnterprisePolicies.INACTIVE) {
Services.ppmm.initialProcessData.policies = {
status: val,
disallowedFeatures: [],
};
Services.ppmm.sharedData.set("EnterprisePolicies:Status", val);
}
return val;
},
@@ -5,85 +5,23 @@
ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
ChromeUtils.import("resource://gre/modules/Services.jsm");

const PREF_LOGLEVEL = "browser.policies.loglevel";

XPCOMUtils.defineLazyGetter(this, "log", () => {
let { ConsoleAPI } = ChromeUtils.import("resource://gre/modules/Console.jsm", {});
return new ConsoleAPI({
prefix: "Enterprise Policies Child",
// tip: set maxLogLevel to "debug" and use log.debug() to create detailed
// messages during development. See LOG_LEVELS in Console.jsm for details.
maxLogLevel: "error",
maxLogLevelPref: PREF_LOGLEVEL,
});
});


// ==== Start XPCOM Boilerplate ==== \\

// Factory object
const EnterprisePoliciesFactory = {
_instance: null,
createInstance: function BGSF_createInstance(outer, iid) {
if (outer != null)
throw Cr.NS_ERROR_NO_AGGREGATION;
return this._instance == null ?
this._instance = new EnterprisePoliciesManagerContent() : this._instance;
}
};

// ==== End XPCOM Boilerplate ==== //


function EnterprisePoliciesManagerContent() {
let policies = Services.cpmm.initialProcessData.policies;
if (policies) {
this._status = policies.status;
// make a copy of the array so that we can keep adding to it
// in a way that is not confusing.
this._disallowedFeatures = policies.disallowedFeatures.slice();
}

Services.cpmm.addMessageListener("EnterprisePolicies:DisallowFeature", this);
Services.cpmm.addMessageListener("EnterprisePolicies:Restart", this);
}

EnterprisePoliciesManagerContent.prototype = {
// for XPCOM
classID: Components.ID("{dc6358f8-d167-4566-bf5b-4350b5e6a7a2}"),
classID: Components.ID("{dc6358f8-d167-4566-bf5b-4350b5e6a7a2}"),
QueryInterface: ChromeUtils.generateQI([Ci.nsIEnterprisePolicies]),

// redefine the default factory for XPCOMUtils
_xpcom_factory: EnterprisePoliciesFactory,

_status: Ci.nsIEnterprisePolicies.INACTIVE,

_disallowedFeatures: [],

receiveMessage({name, data}) {
switch (name) {
case "EnterprisePolicies:DisallowFeature":
this._disallowedFeatures.push(data.feature);
break;

case "EnterprisePolicies:Restart":
this._disallowedFeatures = [];
break;
}
},
_xpcom_factory: XPCOMUtils.generateSingletonFactory(EnterprisePoliciesManagerContent),

get status() {
return this._status;
return Services.cpmm.sharedData.get("EnterprisePolicies:Status") ||
Ci.nsIEnterprisePolicies.INACTIVE;
},

isAllowed(feature) {
return !this._disallowedFeatures.includes(feature);
let disallowedFeatures = Services.cpmm.sharedData.get("EnterprisePolicies:DisallowedFeatures");
return !(disallowedFeatures && disallowedFeatures.has(feature));
},

getActivePolicies() {
throw Cr.NS_ERROR_NOT_AVAILABLE;
}
};

var components = [EnterprisePoliciesManagerContent];
this.NSGetFactory = XPCOMUtils.generateNSGetFactory(components);
this.NSGetFactory = XPCOMUtils.generateNSGetFactory([EnterprisePoliciesManagerContent]);
@@ -101,7 +101,7 @@ var Policies = {
"BlockAboutConfig": {
onBeforeUIStartup(manager, param) {
if (param) {
blockAboutPage(manager, "about:config", true);
blockAboutPage(manager, "about:config");
setAndLockPref("devtools.chrome.enabled", false);
}
}
@@ -110,7 +110,7 @@ var Policies = {
"BlockAboutProfiles": {
onBeforeUIStartup(manager, param) {
if (param) {
blockAboutPage(manager, "about:profiles", true);
blockAboutPage(manager, "about:profiles");
}
}
},
@@ -118,7 +118,7 @@ var Policies = {
"BlockAboutSupport": {
onBeforeUIStartup(manager, param) {
if (param) {
blockAboutPage(manager, "about:support", true);
blockAboutPage(manager, "about:support");
}
}
},
@@ -339,7 +339,7 @@ var Policies = {
"DisableSetDesktopBackground": {
onBeforeUIStartup(manager, param) {
if (param) {
manager.disallowFeature("setDesktopBackground", true);
manager.disallowFeature("setDesktopBackground");
}
}
},
@@ -4,12 +4,6 @@
"use strict";

add_task(async function test_simple_policies() {
await ContentTask.spawn(gBrowser.selectedBrowser, null, async function() {
// Initialize the service in the content process, in case it hasn't
// already started.
Services.policies;
});

let { Policies } = ChromeUtils.import("resource:///modules/policies/Policies.jsm", {});

let policy0Ran = false, policy1Ran = false, policy2Ran = false, policy3Ran = false;
@@ -34,7 +34,7 @@ add_task(async function test_content_process() {
try {
Services.policies.getActivePolicies();
} catch (ex) {
is(ex.result, Cr.NS_ERROR_NOT_AVAILABLE, "Function getActivePolicies() doesn't have a valid definition in the content process");
is(ex.result, Cr.NS_ERROR_XPC_JSOBJECT_HAS_NO_FUNCTION_NAMED, "Function getActivePolicies() doesn't have a valid definition in the content process");
}
});
});
@@ -4,27 +4,14 @@ async function testHasNoPermission(params) {
let contentSetup = params.contentSetup || (() => Promise.resolve());

async function background(contentSetup) {
browser.runtime.onMessage.addListener((msg, sender) => {
browser.test.assertEq(msg, "second script ran", "second script ran");
browser.test.notifyPass("executeScript");
});

browser.test.onMessage.addListener(async msg => {
browser.test.assertEq(msg, "execute-script");

let tabs = await browser.tabs.query({currentWindow: true});
await browser.test.assertRejects(browser.tabs.executeScript({
file: "script.js",
}), /Missing host permission for the tab/);

// TODO bug 1457115: Remove the executeScript call below.

// Execute a script we know we have permissions for in the
// second tab, in the hopes that it will execute after the
// first one.
browser.tabs.executeScript(tabs[1].id, {
file: "second-script.js",
});
browser.test.notifyPass("executeScript");
});

await contentSetup();
@@ -41,10 +28,6 @@ async function testHasNoPermission(params) {
"script.js": function() {
browser.runtime.sendMessage("first script ran");
},

"second-script.js": function() {
browser.runtime.sendMessage("second script ran");
},
},
});
@@ -62,23 +45,22 @@ async function testHasNoPermission(params) {
}

add_task(async function testBadPermissions() {
let tab1 = await BrowserTestUtils.openNewForegroundTab(gBrowser, "http://example.com/");
let tab2 = await BrowserTestUtils.openNewForegroundTab(gBrowser, "http://mochi.test:8888/");
let tab = await BrowserTestUtils.openNewForegroundTab(gBrowser, "http://mochi.test:8888/");

info("Test no special permissions");
await testHasNoPermission({
manifest: {"permissions": ["http://example.com/"]},
manifest: {"permissions": []},
});

info("Test tabs permissions");
await testHasNoPermission({
manifest: {"permissions": ["http://example.com/", "tabs"]},
manifest: {"permissions": ["tabs"]},
});

info("Test no special permissions, commands, key press");
await testHasNoPermission({
manifest: {
"permissions": ["http://example.com/"],
"permissions": [],
"commands": {
"test-tabs-executeScript": {
"suggested_key": {
@@ -104,7 +86,7 @@ add_task(async function testBadPermissions() {
info("Test no special permissions, _execute_browser_action command");
await testHasNoPermission({
manifest: {
"permissions": ["http://example.com/"],
"permissions": [],
"browser_action": {},
"commands": {
"_execute_browser_action": {
@@ -129,7 +111,7 @@ add_task(async function testBadPermissions() {
info("Test no special permissions, _execute_page_action command");
await testHasNoPermission({
manifest: {
"permissions": ["http://example.com/"],
"permissions": [],
"page_action": {},
"commands": {
"_execute_page_action": {
@@ -155,7 +137,7 @@ add_task(async function testBadPermissions() {
info("Test active tab, commands, no key press");
await testHasNoPermission({
manifest: {
"permissions": ["http://example.com/", "activeTab"],
"permissions": ["activeTab"],
"commands": {
"test-tabs-executeScript": {
"suggested_key": {
@@ -169,7 +151,7 @@ add_task(async function testBadPermissions() {
info("Test active tab, browser action, no click");
await testHasNoPermission({
manifest: {
"permissions": ["http://example.com/", "activeTab"],
"permissions": ["activeTab"],
"browser_action": {},
},
});
@@ -177,7 +159,7 @@ add_task(async function testBadPermissions() {
info("Test active tab, page action, no click");
await testHasNoPermission({
manifest: {
"permissions": ["http://example.com/", "activeTab"],
"permissions": ["activeTab"],
"page_action": {},
},
contentSetup: async function() {
@@ -186,8 +168,7 @@ add_task(async function testBadPermissions() {
},
});

BrowserTestUtils.removeTab(tab2);
BrowserTestUtils.removeTab(tab1);
BrowserTestUtils.removeTab(tab);
});

add_task(async function testMatchDataURI() {
@@ -810,8 +810,9 @@ PlacesController.prototype = {
// child of the "Tags" query in the library, in all other places we
// must only remove the query node.
let tag = node.title;
let URIs = PlacesUtils.tagging.getURIsForTag(tag);
transactions.push(PlacesTransactions.Untag({ tag, urls: URIs }));
let urls = new Set();
await PlacesUtils.bookmarks.fetch({tags: [tag]}, b => urls.add(b.url));
transactions.push(PlacesTransactions.Untag({ tag, urls: Array.from(urls) }));
} else if (PlacesUtils.nodeIsURI(node) &&
PlacesUtils.nodeIsQuery(node.parent) &&
PlacesUtils.asQuery(node.parent).queryOptions.queryType ==
@@ -87,7 +87,7 @@ add_task(async function test_tags() {
}

// The tag should now not exist.
Assert.deepEqual(PlacesUtils.tagging.getURIsForTag("test"), [],
Assert.equal(await PlacesUtils.bookmarks.fetch({tags: ["test"]}), null,
"There should be no URIs remaining for the tag");

tagsNode.containerOpen = false;
@@ -0,0 +1,71 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */

"use strict";

ChromeUtils.import("resource://gre/modules/Services.jsm");

var EXPORTED_SYMBOLS = ["LightweightThemeChildHelper"];

/**
* LightweightThemeChildHelper forwards theme data to in-content pages.
*/
var LightweightThemeChildHelper = {
listener: null,
whitelist: [],

/**
* Listen to theme updates for the current process
* @param {Array} whitelist The pages that can receive theme updates.
*/
listen(whitelist) {
if (!this.listener) {
// Clone the whitelist to avoid leaking the global the whitelist
// originates from.
this.whitelist = new Set([...whitelist]);
this.listener = ({ changedKeys }) => {
if (changedKeys.find(change => change.startsWith("theme/"))) {
this._updateProcess(changedKeys);
}
};
Services.cpmm.sharedData.addEventListener("change", this.listener);
}
},

/**
* Update the theme data for the whole process
* @param {Array} changedKeys The sharedData keys that were changed.
*/
_updateProcess(changedKeys) {
const windowEnumerator = Services.ww.getWindowEnumerator();
while (windowEnumerator.hasMoreElements()) {
const window = windowEnumerator.getNext().QueryInterface(Ci.nsIDOMWindow);
const tabChildGlobal = window.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIDocShell)
.sameTypeRootTreeItem
.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIContentFrameMessageManager);
const {chromeOuterWindowID, content} = tabChildGlobal;
if (changedKeys.includes(`theme/${chromeOuterWindowID}`) &&
content && this.whitelist.has(content.document.documentURI)) {
this.update(chromeOuterWindowID, content);
}
}
},

/**
* Forward the theme data to the page.
* @param {Object} outerWindowID The outerWindowID the parent process window has.
* @param {Object} content The receiving global
*/
update(outerWindowID, content) {
const event = Cu.cloneInto({
detail: {
data: Services.cpmm.sharedData.get(`theme/${outerWindowID}`)
},
}, content);
content.dispatchEvent(new content.CustomEvent("LightweightTheme:Set",
event));
},
};
@ -1,82 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
* You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
"use strict";
|
||||
|
||||
/**
|
||||
* LightweightThemeChildListener forwards theme updates from LightweightThemeConsumer to
|
||||
* the whitelisted in-content pages
|
||||
*/
|
||||
class LightweightThemeChildListener {
|
||||
constructor() {
|
||||
/**
|
||||
* The pages that will receive theme updates
|
||||
*/
|
||||
this.whitelist = new Set([
|
||||
"about:home",
|
||||
"about:newtab",
|
||||
"about:welcome",
|
||||
]);
|
||||
|
||||
/**
|
||||
* The last theme data received from LightweightThemeConsumer
|
||||
*/
|
||||
this._lastData = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles theme updates from the parent process
|
||||
* @param {Object} message from the parent process.
|
||||
*/
|
||||
receiveMessage({ name, data, target }) {
|
||||
if (name == "LightweightTheme:Update") {
|
||||
this._lastData = data;
|
||||
this._update(data, target.content);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles events from the content scope.
|
||||
* @param {Object} event The received event.
|
||||
*/
|
||||
handleEvent(event) {
|
||||
const content = event.originalTarget.defaultView;
|
||||
if (content != content.top) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (event.type == "LightweightTheme:Support") {
|
||||
this._update(this._lastData, content);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a given global is allowed to receive theme updates
|
||||
* @param {Object} content The global to check against.
|
||||
* @returns {boolean} Whether the global is allowed to receive updates.
|
||||
*/
|
||||
_isContentWhitelisted(content) {
|
||||
return this.whitelist.has(content.document.documentURI);
|
||||
}
|
||||
|
||||
/**
|
||||
* Forward the theme data to the page.
|
||||
* @param {Object} data The theme data to forward
|
||||
* @param {Object} content The receiving global
|
||||
*/
|
||||
_update(data, content) {
|
||||
if (this._isContentWhitelisted(content)) {
|
||||
const event = Cu.cloneInto({
|
||||
detail: {
|
||||
type: "LightweightTheme:Update",
|
||||
data,
|
||||
},
|
||||
}, content);
|
||||
content.dispatchEvent(new content.CustomEvent("LightweightTheme:Set",
|
||||
event));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var EXPORTED_SYMBOLS = ["LightweightThemeChildListener"];
|
|
@@ -64,7 +64,7 @@ with Files("ExtensionsUI.jsm"):
with Files("LaterRun.jsm"):
    BUG_COMPONENT = ("Firefox", "Tours")

with Files("LightweightThemeChildListener.jsm"):
with Files("LightweightThemeChildHelper.jsm"):
    BUG_COMPONENT = ("WebExtensions", "Themes")

with Files("LightWeightThemeWebInstallListener.jsm"):
@@ -153,7 +153,7 @@ EXTRA_JS_MODULES += [
    'FormValidationHandler.jsm',
    'HomePage.jsm',
    'LaterRun.jsm',
    'LightweightThemeChildListener.jsm',
    'LightweightThemeChildHelper.jsm',
    'LightWeightThemeWebInstallListener.jsm',
    'NetErrorContent.jsm',
    'OpenInTabsUtils.jsm',
@@ -0,0 +1,36 @@
build-infer.py
==============

A script to build infer from source.

```
usage: build-infer.py [-h] -c CONFIG [--clean]

optional arguments:
  -h, --help            show this help message and exit
  -c CONFIG, --config CONFIG
                        infer configuration file
  --clean               Clean the build directory
```

Pre-requisites
--------------
* Working build toolchain.
* ocaml
* git
* autoconf
* libsqlite-dev
* CMake
* Ninja
* Python 2.7

Please use the latest available CMake for your platform to avoid surprises.

Config file format
------------------

build-infer.py accepts a JSON config format with the following fields:

* infer_revision: The infer revision to build.
* infer_repo: git repository for infer.
* patches: Optional list of patches to apply.
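For reference, the configuration file added later in this commit follows exactly this shape; a minimal config with the three documented fields looks like the sketch below (values copied from the config.json further down in this diff):

```json
{
  "infer_repo": "https://github.com/facebook/infer",
  "infer_revision": "14aa1edbf532675f48ba0a35a2e55d07b79b1f3c",
  "patches": []
}
```

With such a file in place, the script is invoked as `build-infer.py -c <config>`, per the usage text above.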
@@ -0,0 +1,116 @@
#!/usr/bin/env python3
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import os
import subprocess
import json
import argparse
import sys
import shutil
from functools import reduce


def check_run(args, path):
    print(' '.join(args) + ' in ' + path, file=sys.stderr)
    subprocess.run(args, cwd=path, check=True)


def run_in(path, args, extra_env=None):
    '''
    Runs the given commands in the directory specified by <path>.
    '''
    env = dict(os.environ)
    env.update(extra_env or {})
    check_run(args, path)
    subprocess.run(args, cwd=path)


def build_tar_package(tar, name, base, directory):
    name = os.path.realpath(name)
    run_in(base, [tar,
                  '-c',
                  '-%s' % ('J' if '.xz' in name else 'j'),
                  '-f',
                  name, directory])


def is_git_repo(dir):
    '''Check whether the given directory is a git repository.'''
    from subprocess import CalledProcessError
    try:
        check_run(['git', 'rev-parse'], dir)
        return True
    except CalledProcessError:
        return False


def git_clone(main_dir, url, clone_dir, commit):
    '''
    Clones the repository from <url> into <clone_dir>, and brings the
    repository to the state of <commit>.
    '''
    run_in(main_dir, ['git', 'clone', url, clone_dir])
    run_in(clone_dir, ['git', 'checkout', commit])


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', required=True,
                        type=argparse.FileType('r'),
                        help='Infer configuration file')
    parser.add_argument('-b', '--base-dir',
                        help="Base directory for code and build artifacts")
    parser.add_argument('--clean', action='store_true',
                        help='Clean the build directory')
    parser.add_argument('--skip-tar', action='store_true',
                        help='Skip tar packaging stage')

    args = parser.parse_args()

    # The directories end up in the debug info, so the easy way of getting
    # a reproducible build is to run it in a know absolute directory.
    # We use a directory that is registered as a volume in the Docker image.
    if args.base_dir:
        base_dir = args.base_dir
    else:
        base_dir = reduce(os.path.join,
                          [os.sep + 'builds', 'worker',
                           'workspace', 'moz-toolchain'])
    infer_dir = os.path.join(base_dir, 'infer')
    source_dir = os.path.join(infer_dir, 'src')
    build_dir = os.path.join(infer_dir, 'build')

    if args.clean:
        shutil.rmtree(build_dir)
        os.sys.exit(0)

    config = json.load(args.config)
    infer_revision = config['infer_revision']
    infer_repo = config['infer_repo']

    for folder in [infer_dir, source_dir, build_dir]:
        os.makedirs(folder, exist_ok=True)

    # clone infer
    if not is_git_repo(source_dir):
        # git doesn't like cloning into a non-empty folder. If src is not a git
        # repo then just remove it in order to reclone
        shutil.rmtree(source_dir)
        git_clone(infer_dir, infer_repo, source_dir, infer_revision)
    # apply a few patches
    dir_path = os.path.dirname(os.path.realpath(__file__))
    # clean the git directory by reseting all changes
    git_commands = [['clean', '-f'], ['reset', '--hard']]
    for command in git_commands:
        run_in(source_dir, ['git']+command)
    for p in config.get('patches', []):
        run_in(source_dir, ['git', 'apply', os.path.join(dir_path, p)])
    # build infer
    run_in(source_dir, ['./build-infer.sh', 'java'],
           extra_env={'NO_CMAKE_STRIP': '1'})
    package_name = 'infer'
    if not args.skip_tar:
        build_tar_package('tar', '%s.tar.xz' % (package_name),
                          source_dir, os.path.join('infer', 'bin'))

@@ -0,0 +1,5 @@
{
  "infer_repo": "https://github.com/facebook/infer",
  "infer_revision": "14aa1edbf532675f48ba0a35a2e55d07b79b1f3c",
  "patches": []
}
@@ -158,8 +158,9 @@ private:
// Tracks the set of declarations that the current expression/statement is
// nested inside of.
struct AutoSetContext {
AutoSetContext(IndexConsumer *Self, NamedDecl *Context)
AutoSetContext(IndexConsumer *Self, NamedDecl *Context, bool VisitImplicit = false)
: Self(Self), Prev(Self->CurDeclContext), Decl(Context) {
this->VisitImplicit = VisitImplicit || (Prev ? Prev->VisitImplicit : false);
Self->CurDeclContext = this;
}
@@ -168,6 +169,7 @@ private:
IndexConsumer *Self;
AutoSetContext *Prev;
NamedDecl *Decl;
bool VisitImplicit;
};
AutoSetContext *CurDeclContext;
@@ -571,7 +573,7 @@ public:
return Super::TraverseCXXMethodDecl(D);
}
bool TraverseCXXConstructorDecl(CXXConstructorDecl *D) {
AutoSetContext Asc(this, D);
AutoSetContext Asc(this, D, true);
const FunctionDecl *Def;
// See TraverseFunctionDecl.
if (TemplateStack && D->isDefined(Def) && Def && D != Def) {
@@ -796,6 +798,11 @@ public:
return false;
}

bool shouldVisitImplicitCode() const {
AutoSetContext *Ctxt = CurDeclContext;
return Ctxt ? Ctxt->VisitImplicit : false;
}

bool TraverseClassTemplateDecl(ClassTemplateDecl *D) {
AutoTemplateContext Atc(this);
Super::TraverseClassTemplateDecl(D);
@@ -49,8 +49,8 @@ add_task(async function() {
info("Open the Browser Content Toolbox");
let toolbox = await gDevToolsBrowser.openContentProcessToolbox(gBrowser);

info("Wait for the debugger to be ready");
await toolbox.getPanelWhenReady("jsdebugger");
info("Select the debugger");
await toolbox.selectTool("jsdebugger");

let dbg = createDebuggerContext(toolbox);
ok(dbg, "Debugger context is available");
@@ -18,6 +18,9 @@ const MENUS_L10N = new LocalizationHelper("devtools/client/locales/menus.propert

loader.lazyRequireGetter(this, "gDevTools", "devtools/client/framework/devtools", true);
loader.lazyRequireGetter(this, "gDevToolsBrowser", "devtools/client/framework/devtools-browser", true);
loader.lazyRequireGetter(this, "Telemetry", "devtools/client/shared/telemetry");

let telemetry = null;

// Keep list of inserted DOM Elements in order to remove them on unload
// Maps browser xul document => list of DOM Elements
@@ -78,6 +81,7 @@ function createToolMenuElements(toolDefinition, doc) {
const oncommand = function(id, event) {
const window = event.target.ownerDocument.defaultView;
gDevToolsBrowser.selectToolCommand(window.gBrowser, id, Cu.now());
sendEntryPointTelemetry();
}.bind(null, id);

const menuitem = createMenuItem({
@@ -96,6 +100,26 @@ function createToolMenuElements(toolDefinition, doc) {
};
}

/**
* Send entry point telemetry explaining how the devtools were launched when
* launched from the System Menu.. This functionality also lives inside
* `devtools/startup/devtools-startup.js` but that codepath is only used the
* first time a toolbox is opened for a tab.
*/
function sendEntryPointTelemetry() {
if (!telemetry) {
telemetry = new Telemetry();
}

telemetry.addEventProperty(
"devtools.main", "open", "tools", null, "shortcut", ""
);

telemetry.addEventProperty(
"devtools.main", "open", "tools", null, "entrypoint", "SystemMenu"
);
}

/**
* Create xul menuitem, key elements for a given tool.
* And then insert them into browser DOM.
@@ -453,6 +453,7 @@ DevTools.prototype = {
*/
async showToolbox(target, toolId, hostType, hostOptions, startTime) {
let toolbox = this._toolboxes.get(target);

if (toolbox) {
if (hostType != null && toolbox.hostType != hostType) {
await toolbox.switchHost(hostType);
@@ -477,14 +478,18 @@ DevTools.prototype = {
this._creatingToolboxes.delete(target);

if (startTime) {
this.logToolboxOpenTime(toolbox.currentToolId, startTime);
this.logToolboxOpenTime(toolbox, startTime);
}
this._firstShowToolbox = false;
}

// We send the "enter" width here to ensure it is always sent *after*
// the "open" event.
const width = Math.ceil(toolbox.win.outerWidth / 50) * 50;
const panelName = this.makeToolIdHumanReadable(toolId || toolbox.defaultToolId);
this._telemetry.addEventProperty(
"devtools.main", "open", "tools", null, "width", width);
"devtools.main", "enter", panelName, null, "width", width
);

return toolbox;
},
@@ -496,22 +501,24 @@ DevTools.prototype = {
* subsequent toolbox opening, which should all be faster.
* These two probes are indexed by Tool ID.
*
* @param {String} toolId
*        The id of the opened tool.
* @param {String} toolbox
*        Toolbox instance.
* @param {Number} startTime
*        Indicates the time at which the user event related to the toolbox
*        opening started. This is a `Cu.now()` timing.
*/
logToolboxOpenTime(toolId, startTime) {
logToolboxOpenTime(toolbox, startTime) {
const toolId = toolbox.currentToolId || toolbox.defaultToolId;
const delay = Cu.now() - startTime;
const panelName = this.makeToolIdHumanReadable(toolId);

const telemetryKey = this._firstShowToolbox ?
"DEVTOOLS_COLD_TOOLBOX_OPEN_DELAY_MS" : "DEVTOOLS_WARM_TOOLBOX_OPEN_DELAY_MS";
this._telemetry.getKeyedHistogramById(telemetryKey).add(toolId, delay);

this._telemetry.addEventProperty(
"devtools.main", "open", "tools", null, "first_panel",
this.makeToolIdHumanReadable(toolId));
"devtools.main", "open", "tools", null, "first_panel", panelName
);
},

makeToolIdHumanReadable(toolId) {
@@ -7,8 +7,8 @@

"use strict";

const toolId1 = "test-tool-1";
const toolId2 = "test-tool-2";
const toolId1 = "testtool1";
const toolId2 = "testtool2";

function test() {
addTab("about:blank").then(runTests1);
@@ -19,7 +19,7 @@ function testRegister(aToolbox) {
gDevTools.once("tool-registered", toolRegistered);

gDevTools.registerTool({
id: "test-tool",
id: "testTool",
label: "Test Tool",
inMenu: true,
isTargetSupported: () => true,
@@ -28,7 +28,7 @@ function testRegister(aToolbox) {
}

function toolRegistered(toolId) {
is(toolId, "test-tool", "tool-registered event handler sent tool id");
is(toolId, "testTool", "tool-registered event handler sent tool id");

ok(gDevTools.getToolDefinitionMap().has(toolId), "tool added to map");
@@ -61,11 +61,11 @@ function getAllBrowserWindows() {
function testUnregister() {
gDevTools.once("tool-unregistered", toolUnregistered);

gDevTools.unregisterTool("test-tool");
gDevTools.unregisterTool("testTool");
}

function toolUnregistered(toolId) {
is(toolId, "test-tool", "tool-unregistered event handler sent tool id");
is(toolId, "testTool", "tool-unregistered event handler sent tool id");

ok(!gDevTools.getToolDefinitionMap().has(toolId), "tool removed from map");
@@ -39,7 +39,7 @@ add_task(async function() {

function registerNewTool() {
const toolDefinition = {
id: "test-tool",
id: "testTool",
isTargetSupported: () => true,
visibilityswitch: "devtools.test-tool.enabled",
url: "about:blank",
@@ -47,11 +47,11 @@ function registerNewTool() {
};

ok(gDevTools, "gDevTools exists");
ok(!gDevTools.getToolDefinitionMap().has("test-tool"),
ok(!gDevTools.getToolDefinitionMap().has("testTool"),
"The tool is not registered");

gDevTools.registerTool(toolDefinition);
ok(gDevTools.getToolDefinitionMap().has("test-tool"),
ok(gDevTools.getToolDefinitionMap().has("testTool"),
"The tool is registered");
}
@@ -476,7 +476,7 @@ async function lookupButtonForToolId(toolId) {
}

async function cleanup() {
gDevTools.unregisterTool("test-tool");
gDevTools.unregisterTool("testTool");
await toolbox.destroy();
gBrowser.removeCurrentTab();
for (const pref of modifiedPrefs) {
@@ -9,7 +9,7 @@
// tool is not supported.

const testToolDefinition = {
id: "test-tool",
id: "testTool",
isTargetSupported: () => true,
visibilityswitch: "devtools.test-tool.enabled",
url: "about:blank",
@@ -31,7 +31,7 @@ add_task(async function() {
let target = TargetFactory.forTab(tab);

let toolbox = await gDevTools.showToolbox(target, testToolDefinition.id);
is(toolbox.currentToolId, "test-tool", "test-tool was selected");
is(toolbox.currentToolId, "testTool", "test-tool was selected");
await gDevTools.closeToolbox(target);

// Make the previously selected tool unavailable.
@@ -10,7 +10,7 @@
// right state, and that it works for an input inside of a panel.

const URL = "data:text/html;charset=utf8,test for textbox context menu";
const textboxToolId = "test-tool-1";
const textboxToolId = "testtool1";

registerCleanupFunction(() => {
gDevTools.unregisterTool(textboxToolId);
@@ -13,8 +13,6 @@ const DISABLE_AUTOHIDE_PREF = "ui.popup.disable_autohide";
const HOST_HISTOGRAM = "DEVTOOLS_TOOLBOX_HOST";
const CURRENT_THEME_SCALAR = "devtools.current_theme";
const HTML_NS = "http://www.w3.org/1999/xhtml";
const REGEX_PANEL =
/^(?:webconsole|inspector|jsdebugger|styleeditor|netmonitor|storage)$/;

var {Ci, Cc} = require("chrome");
var promise = require("promise");
@@ -246,6 +244,10 @@ Toolbox.prototype = {
this.component.setCurrentToolId(id);
},

get defaultToolId() {
return this._defaultToolId;
},

get panelDefinitions() {
return this._panelDefinitions;
},
@@ -1903,15 +1905,7 @@ Toolbox.prototype = {
const panelName = this.getTelemetryPanelNameOrOther(id);
const prevPanelName = this.getTelemetryPanelNameOrOther(this.currentToolId);
const cold = !this.getPanel(id);

this.telemetry.addEventProperties("devtools.main", "enter", panelName, null, {
"host": this._hostType,
"width": width,
"start_state": reason,
"panel_name": panelName,
"cold": cold,
// "session_id" is included at the end of this method.
});
const pending = ["host", "width", "start_state", "panel_name", "cold", "session_id"];

// On first load this.currentToolId === undefined so we need to skip sending
// a devtools.main.exit telemetry event.
@@ -1926,27 +1920,38 @@ Toolbox.prototype = {
});
}

const pending = ["host", "width", "start_state", "panel_name", "cold", "session_id"];
this.telemetry.addEventProperties("devtools.main", "open", "tools", null, {
"width": width,
"session_id": this.sessionId
});

if (id === "webconsole") {
pending.push("message_count");

// Cold webconsole event message_count is handled in
// devtools/client/webconsole/webconsole-output-wrapper.js
if (!cold) {
this.telemetry.addEventProperty(
"devtools.main", "enter", "webconsole", null, "message_count", 0);
}
}
this.telemetry.preparePendingEvent(
"devtools.main", "enter", panelName, null, pending);
this.telemetry.addEventProperty(
"devtools.main", "open", "tools", null, "session_id", this.sessionId
);
// We send the "enter" session ID here to ensure it is always sent *after*
// the "open" session ID.
this.telemetry.addEventProperty(
"devtools.main", "enter", panelName, null, "session_id", this.sessionId
);

this.telemetry.preparePendingEvent("devtools.main", "enter", panelName, null, pending);

this.telemetry.addEventProperties("devtools.main", "enter", panelName, null, {
"host": this._hostType,
"start_state": reason,
"panel_name": panelName,
"cold": cold,
"session_id": this.sessionId
});

if (reason !== "initial_panel") {
const width = Math.ceil(this.win.outerWidth / 50) * 50;
this.telemetry.addEventProperty(
"devtools.main", "enter", panelName, null, "width", width
);
}

// Cold webconsole event message_count is handled in
// devtools/client/webconsole/webconsole-output-wrapper.js
if (!cold && id === "webconsole") {
this.telemetry.addEventProperty(
"devtools.main", "enter", "webconsole", null, "message_count", 0);
}

this.telemetry.toolOpened(id);
},
@@ -2910,6 +2915,7 @@ Toolbox.prototype = {
}

this.browserRequire = null;
this._toolNames = null;

// Now that we are closing the toolbox we can re-enable the cache settings
// and disable the service workers testing settings for the current tab.
@@ -2963,11 +2969,6 @@ Toolbox.prototype = {
const prevPanelName = this.getTelemetryPanelNameOrOther(this.currentToolId);

this.telemetry.toolClosed("toolbox");
this.telemetry.recordEvent("devtools.main", "close", "tools", null, {
"host": host,
"width": width,
"session_id": this.sessionId
});
this.telemetry.recordEvent("devtools.main", "exit", prevPanelName, null, {
"host": host,
"width": width,
@@ -2976,6 +2977,11 @@ Toolbox.prototype = {
"reason": "toolbox_close",
"session_id": this.sessionId
});
this.telemetry.recordEvent("devtools.main", "close", "tools", null, {
"host": host,
"width": width,
"session_id": this.sessionId
});

// Finish all outstanding tasks (which means finish destroying panels and
// then destroying the host, successfully or not) before destroying the
@@ -3378,7 +3384,13 @@ Toolbox.prototype = {
},

getTelemetryPanelNameOrOther: function(id) {
if (!REGEX_PANEL.test(id)) {
if (!this._toolNames) {
const definitions = gDevTools.getToolDefinitionArray();
const definitionIds = definitions.map(definition => definition.id);

this._toolNames = new Set(definitionIds);
}
if (!this._toolNames.has(id)) {
return "other";
}
return id;
@@ -1978,7 +1978,7 @@ function getStacktraceElements(props, preview) {
// Result:
// ["scriptLocation:2:100", "scriptLocation", "2", "100"]
const locationParts = location.match(/^(.*):(\d+):(\d+)$/);
if (props.onViewSourceInDebugger && location && !IGNORED_SOURCE_URLS.includes(locationParts[1]) && locationParts) {
if (props.onViewSourceInDebugger && location && locationParts && !IGNORED_SOURCE_URLS.includes(locationParts[1])) {
const [, url, line, column] = locationParts;
onLocationClick = e => {
// Don't trigger ObjectInspector expand/collapse.
@@ -553,6 +553,10 @@ class Telemetry {
toolOpened(id) {
const charts = getChartsFromToolId(id);

if (!charts) {
return;
}

if (charts.timerHist) {
this.start(charts.timerHist, this);
}
@@ -577,7 +581,7 @@ class Telemetry {
toolClosed(id) {
const charts = getChartsFromToolId(id);

if (charts.timerHist) {
if (charts && charts.timerHist) {
this.finish(charts.timerHist, this);
}
}
@@ -276,6 +276,7 @@ subsuite = clipboard
[browser_webconsole_cspro.js]
[browser_webconsole_document_focus.js]
[browser_webconsole_duplicate_errors.js]
[browser_webconsole_error_with_longstring_stack.js]
[browser_webconsole_error_with_unicode.js]
[browser_webconsole_errors_after_page_reload.js]
[browser_webconsole_eval_in_debugger_stackframe.js]
@@ -0,0 +1,29 @@
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */

// Check if an error with a longString stack is displayed as expected.

"use strict";

const MESSAGE = "Error with longString stack";
const TEST_URI = `data:text/html;charset=utf8,<script>
const x = new Error("longString stack");
x.stack = "s@http://exampl.com:1:1\\n".repeat(1000);
console.log("${MESSAGE}", x);
</script>`;

add_task(async function() {
const hud = await openNewTabAndConsole(TEST_URI);

info("Wait for the error to be logged");
const msgNode = await waitFor(() => findMessage(hud, MESSAGE));
ok(msgNode, `Error logged`);

const errorNode = msgNode.querySelector(".objectBox-stackTrace");
ok(errorNode, "The error object is logged as expected");
ok(errorNode.textContent.includes("longString stack"));
ok(errorNode.querySelectorAll(".objectBox-stackTrace-fn").length > 0,
"Frames functions are displayed");
ok(errorNode.querySelectorAll(".objectBox-stackTrace-location").length > 0,
"Frames location are displayed");
});
@@ -429,8 +429,13 @@ WebConsoleOutputWrapper.prototype = {
store.dispatch(actions.messagesAdd(this.queuedMessageAdds));

const length = this.queuedMessageAdds.length;
this.telemetry.addEventProperty(
"devtools.main", "enter", "webconsole", null, "message_count", length);

// This telemetry event is only useful when we have a toolbox so only
// send it when we have one.
if (this.toolbox) {
this.telemetry.addEventProperty(
"devtools.main", "enter", "webconsole", null, "message_count", length);
}

this.queuedMessageAdds = [];
@@ -263,6 +263,9 @@ this.DevToolsShim = {
}

if (reason) {
this.telemetry.addEventProperty(
"devtools.main", "open", "tools", null, "shortcut", ""
);
this.telemetry.addEventProperty(
"devtools.main", "open", "tools", null, "entrypoint", reason
);
@@ -841,6 +841,18 @@ DevToolsStartup.prototype = {
}
},

/**
* Send entry point telemetry explaining how the devtools were launched. This
* functionality also lives inside `devtools/client/framework/browser-menus.js`
* because this codepath is only used the first time a toolbox is opened for a
* tab.
*
* @param {String} reason
*        One of "KeyShortcut", "SystemMenu", "HamburgerMenu", "ContextMenu",
*        "CommandLine".
* @param {String} key
*        The key used by a key shortcut.
*/
sendEntryPointTelemetry(reason, key = "") {
if (!reason) {
return;
@@ -126,6 +126,7 @@ NS_IMPL_CYCLE_COLLECTION_CLASS(nsFrameMessageManager)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(nsFrameMessageManager)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mListeners)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mChildManagers)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSharedData)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END

NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN(nsFrameMessageManager)
@@ -138,6 +139,7 @@ NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(nsFrameMessageManager)
tmp->mChildManagers[i - 1]->Disconnect(false);
}
NS_IMPL_CYCLE_COLLECTION_UNLINK(mChildManagers)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mSharedData)
tmp->mInitialProcessData.setNull();
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
@@ -514,6 +514,14 @@ SharedMapChangeEvent::Constructor(EventTarget* aEventTarget,
return event.forget();
}

NS_IMPL_CYCLE_COLLECTION_INHERITED(WritableSharedMap, SharedMap, mReadOnly)

NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(WritableSharedMap)
NS_INTERFACE_MAP_END_INHERITING(SharedMap)

NS_IMPL_ADDREF_INHERITED(WritableSharedMap, SharedMap)
NS_IMPL_RELEASE_INHERITED(WritableSharedMap, SharedMap)

} // ipc
} // dom
} // mozilla
@@ -322,6 +322,8 @@ protected:
class WritableSharedMap final : public SharedMap
{
public:
NS_DECL_ISUPPORTS_INHERITED
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(WritableSharedMap, SharedMap)

WritableSharedMap();
@@ -59,7 +59,6 @@ struct AudioIpcInitParams {
int mServerConnection;
size_t mPoolSize;
size_t mStackSize;
void (*mThreadCreateCallback)(const char*);
};

// These functions are provided by audioipc-server crate
@@ -431,9 +430,6 @@ cubeb* GetCubebContextUnlocked()
initParams.mPoolSize = sAudioIPCPoolSize;
initParams.mStackSize = sAudioIPCStackSize;
initParams.mServerConnection = sIPCConnection->ClonePlatformHandle().release();
initParams.mThreadCreateCallback = [](const char* aName) {
PROFILER_REGISTER_THREAD(aName);
};

MOZ_LOG(gCubebLog, LogLevel::Debug, ("%s: %d", PREF_AUDIOIPC_POOL_SIZE, (int) initParams.mPoolSize));
MOZ_LOG(gCubebLog, LogLevel::Debug, ("%s: %d", PREF_AUDIOIPC_STACK_SIZE, (int) initParams.mStackSize));
@@ -224,7 +224,9 @@ PaintThread::InitPaintWorkers()
{
MOZ_ASSERT(NS_IsMainThread());
int32_t count = PaintThread::CalculatePaintWorkerCount();
mPaintWorkers = SharedThreadPool::Get(NS_LITERAL_CSTRING("PaintWorker"), count);
if (count != 1) {
mPaintWorkers = SharedThreadPool::Get(NS_LITERAL_CSTRING("PaintWorker"), count);
}
}

void
@@ -273,7 +275,8 @@ PaintThread::IsOnPaintThread()
bool
PaintThread::IsOnPaintWorkerThread()
{
return mPaintWorkers && mPaintWorkers->IsOnCurrentThread();
return (mPaintWorkers && mPaintWorkers->IsOnCurrentThread()) ||
(sThreadId == PlatformThread::CurrentId());
}

void
@@ -419,7 +422,6 @@ PaintThread::PaintTiledContents(CapturedTiledPaintState* aState)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(aState);
MOZ_ASSERT(mPaintWorkers);

if (gfxPrefs::LayersOMTPDumpCapture() && aState->mCapture) {
aState->mCapture->Dump();
@@ -437,10 +439,14 @@ PaintThread::PaintTiledContents(CapturedTiledPaintState* aState)
self->AsyncPaintTiledContents(cbc, state);
});

nsIEventTarget* paintThread = mPaintWorkers ?
static_cast<nsIEventTarget*>(mPaintWorkers.get()) :
static_cast<nsIEventTarget*>(sThread.get());

#ifndef OMTP_FORCE_SYNC
mPaintWorkers->Dispatch(task.forget());
paintThread->Dispatch(task.forget());
#else
SyncRunnable::DispatchToThread(mPaintWorkers, task);
SyncRunnable::DispatchToThread(paintThread, task);
#endif
}
@@ -26,8 +26,6 @@
#include <pthread_np.h>
#endif

#include "nsThreadUtils.h"

#if defined(OS_MACOSX)
namespace base {
void InitThreading();
@@ -35,10 +33,6 @@ void InitThreading();
#endif

static void* ThreadFunc(void* closure) {
// Create a nsThread wrapper for the current platform thread, and register it
// with the thread manager.
(void) NS_GetCurrentThread();

PlatformThread::Delegate* delegate =
static_cast<PlatformThread::Delegate*>(closure);
delegate->ThreadMain();
@@ -98,10 +92,21 @@ void PlatformThread::SetName(const char* name) {
if (PlatformThread::CurrentId() == getpid())
return;

// Using NS_SetCurrentThreadName, as opposed to using platform APIs directly,
// also sets the thread name on the PRThread wrapper, and allows us to
// retrieve it using PR_GetThreadName.
NS_SetCurrentThreadName(name);
// http://0pointer.de/blog/projects/name-your-threads.html
// Set the name for the LWP (which gets truncated to 15 characters).
// Note that glibc also has a 'pthread_setname_np' api, but it may not be
// available everywhere and it's only benefit over using prctl directly is
// that it can set the name of threads other than the current thread.
#if defined(OS_LINUX)
prctl(PR_SET_NAME, reinterpret_cast<uintptr_t>(name), 0, 0, 0);
#elif defined(OS_NETBSD)
pthread_setname_np(pthread_self(), "%s", (void *)name);
#elif defined(OS_BSD) && !defined(__GLIBC__)
pthread_set_name_np(pthread_self(), name);
#elif defined(OS_SOLARIS)
pthread_setname_np(pthread_self(), name);
#else
#endif
}
#endif // !OS_MACOSX
@@ -124,9 +129,8 @@ bool CreateThread(size_t stack_size, bool joinable,
pthread_attr_setdetachstate(&attributes, PTHREAD_CREATE_DETACHED);
}

if (stack_size == 0)
stack_size = nsIThreadManager::DEFAULT_STACK_SIZE;
pthread_attr_setstacksize(&attributes, stack_size);
if (stack_size > 0)
pthread_attr_setstacksize(&attributes, stack_size);

success = !pthread_create(thread_handle, &attributes, ThreadFunc, delegate);
@ -9,8 +9,6 @@
#include "base/logging.h"
#include "base/win_util.h"

#include "nsThreadUtils.h"

namespace {

// The information on how to set the thread name comes from

@ -25,10 +23,6 @@ typedef struct tagTHREADNAME_INFO {
} THREADNAME_INFO;

DWORD __stdcall ThreadFunc(void* closure) {
// Create a nsThread wrapper for the current platform thread, and register it
// with the thread manager.
(void) NS_GetCurrentThread();

PlatformThread::Delegate* delegate =
static_cast<PlatformThread::Delegate*>(closure);
delegate->ThreadMain();

@ -54,10 +48,24 @@ void PlatformThread::Sleep(int duration_ms) {

// static
void PlatformThread::SetName(const char* name) {
// Using NS_SetCurrentThreadName, as opposed to using platform APIs directly,
// also sets the thread name on the PRThread wrapper, and allows us to
// retrieve it using PR_GetThreadName.
NS_SetCurrentThreadName(name);
#ifdef HAVE_SEH_EXCEPTIONS
// The debugger needs to be around to catch the name in the exception. If
// there isn't a debugger, we are just needlessly throwing an exception.
if (!::IsDebuggerPresent())
return;

THREADNAME_INFO info;
info.dwType = 0x1000;
info.szName = name;
info.dwThreadID = CurrentId();
info.dwFlags = 0;

MOZ_SEH_TRY {
RaiseException(kVCThreadNameException, 0, sizeof(info)/sizeof(DWORD),
reinterpret_cast<DWORD_PTR*>(&info));
} MOZ_SEH_EXCEPT(EXCEPTION_CONTINUE_EXECUTION) {
}
#endif
}

// static
@ -943,12 +943,7 @@ js::CurrentThreadIsParseThread()
}
#endif

// We want our default stack size limit to be approximately 2MB, to be safe, but
// expect most threads to use much less. On Linux, however, requesting a stack
// of 2MB or larger risks the kernel allocating an entire 2MB huge page for it
// on first access, which we do not want. To avoid this possibility, we subtract
// 2 standard VM page sizes from our default.
static const uint32_t kDefaultHelperStackSize = 2048 * 1024 - 2 * 4096;
static const uint32_t kDefaultHelperStackSize = 2048 * 1024;
static const uint32_t kDefaultHelperStackQuota = 1800 * 1024;

// TSan enforces a minimum stack size that's just slightly larger than our
@ -79,10 +79,6 @@ using namespace xpc;
using namespace JS;
using mozilla::dom::AutoEntryScript;

// The watchdog thread loop is pretty trivial, and should not require much stack
// space to do its job. So only give it 32KiB.
static constexpr size_t kWatchdogStackSize = 32 * 1024;

static void WatchdogMain(void* arg);
class Watchdog;
class WatchdogManager;

@ -147,7 +143,7 @@ class Watchdog
// join it on shutdown.
mThread = PR_CreateThread(PR_USER_THREAD, WatchdogMain, this,
PR_PRIORITY_NORMAL, PR_GLOBAL_THREAD,
PR_JOINABLE_THREAD, kWatchdogStackSize);
PR_JOINABLE_THREAD, 0);
if (!mThread)
MOZ_CRASH("PR_CreateThread failed!");

@ -476,8 +472,6 @@ static void
WatchdogMain(void* arg)
{
AUTO_PROFILER_REGISTER_THREAD("JS Watchdog");
// Create an nsThread wrapper for the thread and register it with the thread manager.
Unused << NS_GetCurrentThread();
NS_SetCurrentThreadName("JS Watchdog");

Watchdog* self = static_cast<Watchdog*>(arg);
@ -1,5 +1,5 @@
== input.html input-ref.html
== button.html button-ref.html
fuzzy-if(isDebugBuild&&gtkWidget,1,1) == button.html button-ref.html
== textarea.html textarea-ref.html
== select.html select-ref.html
== fieldset.html fieldset-ref.html
@ -20,7 +20,6 @@ use std::os::raw::c_void;
use std::os::unix::io::FromRawFd;
use std::os::unix::net;
use std::sync::mpsc;
use std::thread;
use stream;
use tokio_core::reactor::{Handle, Remote};
use tokio_uds::UnixStream;

@ -100,25 +99,9 @@ impl ContextOps for ClientContext {

let (tx_rpc, rx_rpc) = mpsc::channel();

let params = CPUPOOL_INIT_PARAMS.with(|p| {
p.replace(None).unwrap()
});

let thread_create_callback = params.thread_create_callback;

let register_thread = move || {
if let Some(func) = thread_create_callback {
let thr = thread::current();
let name = CString::new(thr.name().unwrap()).unwrap();
func(name.as_ptr());
}
};

let core = t!(core::spawn_thread("AudioIPC Client RPC", move || {
let handle = core::handle();

register_thread();

open_server_stream()
.ok()
.and_then(|stream| UnixStream::from_stream(stream, &handle).ok())

@ -133,12 +116,14 @@ impl ContextOps for ClientContext {

let rpc = t!(rx_rpc.recv());

let cpupool = futures_cpupool::Builder::new()
let cpupool = CPUPOOL_INIT_PARAMS.with(|p| {
let params = p.replace(None).unwrap();
futures_cpupool::Builder::new()
.name_prefix("AudioIPC")
.after_start(register_thread)
.pool_size(params.pool_size)
.stack_size(params.stack_size)
.create();
.create()
});

let ctx = Box::new(ClientContext {
_ops: &CLIENT_OPS as *const _,
@ -37,14 +37,12 @@ pub struct AudioIpcInitParams {
pub server_connection: c_int,
pub pool_size: usize,
pub stack_size: usize,
pub thread_create_callback: Option<extern "C" fn(*const ::std::os::raw::c_char)>,
}

#[derive(Clone, Copy, Debug)]
struct CpuPoolInitParams {
pub pool_size: usize,
pub stack_size: usize,
pub thread_create_callback: Option<extern "C" fn(*const ::std::os::raw::c_char)>,
}

impl CpuPoolInitParams {

@ -52,7 +50,6 @@ impl CpuPoolInitParams {
CpuPoolInitParams {
pool_size: params.pool_size,
stack_size: params.stack_size,
thread_create_callback: params.thread_create_callback,
}
}
}
@ -17,27 +17,14 @@ function* testHasNoPermission(params) {
let contentSetup = params.contentSetup || (() => Promise.resolve());

async function background(contentSetup) {
browser.runtime.onMessage.addListener((msg, sender) => {
browser.test.assertEq(msg, "second script ran", "second script ran");
browser.test.notifyPass("executeScript");
});

browser.test.onMessage.addListener(async msg => {
browser.test.assertEq(msg, "execute-script");

let tabs = await browser.tabs.query({currentWindow: true});
await browser.test.assertRejects(browser.tabs.executeScript({
file: "script.js",
}), /Missing host permission for the tab/);

// TODO bug 1457115: Remove the executeScript call below.

// Execute a script we know we have permissions for in the
// second tab, in the hopes that it will execute after the
// first one.
browser.tabs.executeScript(tabs[1].id, {
file: "second-script.js",
});
browser.test.notifyPass("executeScript");
});

await contentSetup();

@ -54,10 +41,6 @@ function* testHasNoPermission(params) {
"script.js": function() {
browser.runtime.sendMessage("first script ran");
},

"second-script.js": function() {
browser.runtime.sendMessage("second script ran");
},
},
});

@ -75,23 +58,21 @@ function* testHasNoPermission(params) {
}

add_task(async function testBadPermissions() {
let win1 = window.open("http://example.com/");
let win2 = window.open("http://mochi.test:8888/");
let win = window.open("http://mochi.test:8888/");

await new Promise(resolve => setTimeout(resolve, 0));

info("Test no special permissions");
await testHasNoPermission({
manifest: {"permissions": ["http://example.com/"]},
manifest: {"permissions": []},
});

info("Test tabs permissions");
await testHasNoPermission({
manifest: {"permissions": ["http://example.com/", "tabs"]},
manifest: {"permissions": ["tabs"]},
});

win2.close();
win1.close();
win.close();
});

add_task(async function testBadURL() {
@ -185,7 +185,12 @@ nsAboutProtocolHandler::NewChannel2(nsIURI* uri,
// a srcdoc iframe. To ensure that it stays unresolvable, we pretend
// that it doesn't exist.
rv = NS_ERROR_FACTORY_NOT_REGISTERED;
} else {
} else if (!path.EqualsLiteral("blank") &&
!path.EqualsLiteral("neterror") &&
!path.EqualsLiteral("home") &&
!path.EqualsLiteral("welcome") &&
!path.EqualsLiteral("newtab") &&
!path.EqualsLiteral("certerror")) {
nsCOMPtr<nsIEnterprisePolicies> policyManager =
do_GetService("@mozilla.org/browser/enterprisepolicies;1", &rv2);
if (NS_SUCCEEDED(rv2)) {
@ -80,7 +80,8 @@ Http2Stream::Http2Stream(nsAHttpTransaction *httpTransaction,
{
MOZ_ASSERT(OnSocketThread(), "not on socket thread");

LOG3(("Http2Stream::Http2Stream %p", this));
nsHttpTransaction *trans = mTransaction->QueryHttpTransaction();
LOG3(("Http2Stream::Http2Stream %p trans=%p atrans=%p", this, trans, httpTransaction));

mServerReceiveWindow = session->GetServerInitialStreamWindow();
mClientReceiveWindow = session->PushAllowance();

@ -104,7 +105,6 @@ Http2Stream::Http2Stream(nsAHttpTransaction *httpTransaction,
MOZ_ASSERT(httpPriority >= 0);
SetPriority(static_cast<uint32_t>(httpPriority));

nsHttpTransaction *trans = mTransaction->QueryHttpTransaction();
if (trans) {
mTransactionTabId = trans->TopLevelOuterContentWindowId();
}
@ -421,9 +421,10 @@ add_task(async function test_bookmark_change_during_sync() {
"Folder 1 should have 3 children after first sync");
await assertChildGuids(folder2_guid, [bmk4_guid, tagQuery_guid],
"Folder 2 should have 2 children after first sync");
let taggedURIs = PlacesUtils.tagging.getURIsForTag("taggy");
let taggedURIs = [];
await PlacesUtils.bookmarks.fetch({tags: ["taggy"]}, b => taggedURIs.push(b.url));
equal(taggedURIs.length, 1, "Should have 1 tagged URI");
equal(taggedURIs[0].spec, "https://example.org/",
equal(taggedURIs[0].href, "https://example.org/",
"Synced tagged bookmark should appear in tagged URI list");

changes = await PlacesSyncUtils.bookmarks.pullChanges();
@ -86,6 +86,9 @@ jobs:
fetch:
symbol: I(fetch)
parent: debian9-base
infer-build:
symbol: I(infer)
parent: debian9-base
mingw32-build:
symbol: I(mingw)
parent: debian9-base

@ -48,6 +48,7 @@ jobs:
toolchains:
- linux64-clang
- linux64-clang-tidy
- linux64-infer
- linux64-rust
- linux64-sccache

@ -125,6 +125,28 @@ linux64-clang-tidy:
toolchains:
- linux64-gcc-4.9

linux64-infer:
description: "infer build"
index:
product: static-analysis
job-name: linux64-infer
treeherder:
kind: build
platform: toolchains/opt
symbol: TL(infer)
tier: 1
worker-type: aws-provisioner-v1/gecko-{level}-b-linux
worker:
docker-image: {in-tree: infer-build}
max-run-time: 3600
run:
using: toolchain-script
script: build-infer-linux.sh
resources:
- 'build/build-infer/build-infer.py'
- 'build/build-infer/infer-linux64.json'
toolchain-artifact: public/build/infer.tar.xz

linux64-gcc-4.9:
description: "GCC 4.9 toolchain build"
treeherder:
@ -0,0 +1,30 @@
# %ARG DOCKER_IMAGE_PARENT
FROM $DOCKER_IMAGE_PARENT
MAINTAINER Mike Hommey <mhommey@mozilla.com>

VOLUME /builds/worker/checkouts
VOLUME /builds/worker/workspace
VOLUME /builds/worker/tooltool-cache

ENV XZ_OPT=-T0

RUN apt-get update && \
    apt-get install \
      autoconf \
      bison \
      bzip2 \
      flex \
      curl \
      git \
      opam \
      libsqlite3-dev \
      autoconf \
      automake \
      cmake \
      libc6-dev \
      openjdk-8-jdk-headless \
      pkg-config \
      patch \
      tar \
      unzip \
      zlib1g-dev
@ -0,0 +1,22 @@
#!/bin/bash
set -x -e -v

# This script is for building infer for Linux.

WORKSPACE=$HOME/workspace
HOME_DIR=$WORKSPACE/build
UPLOAD_DIR=$HOME/artifacts

cd $HOME_DIR/src

# gets a bit too verbose here
set +x

cd build/build-infer
./build-infer.py -c infer-linux64.json

set -x

# Put a tarball in the artifacts dir
mkdir -p $UPLOAD_DIR
cp infer.tar.* $UPLOAD_DIR
@ -46,7 +46,6 @@ config = {
"cppunittest": "runcppunittests.py",
"gtest": "rungtests.py",
"jittest": "jit_test.py",
"mozbase": "test.py",
"mozmill": "runtestlist.py",
},
"minimum_tests_zip_dirs": [

@ -103,14 +102,6 @@ config = {
"run_filename": "runtests.py",
"testsdir": "mochitest"
},
"mozbase": {
"options": [
"-b",
"%(binary_path)s"
],
"run_filename": "test.py",
"testsdir": "mozbase"
},
"mozmill": {
"options": [
"--binary=%(binary_path)s",

@ -245,9 +236,6 @@ config = {
"jittest2": ["--total-chunks=2", "--this-chunk=2"],
"jittest-chunked": [],
},
"all_mozbase_suites": {
"mozbase": []
},
"run_cmd_checks_enabled": True,
"preflight_run_cmd_suites": [
# NOTE 'enabled' is only here while we have unconsolidated configs

@ -278,7 +266,6 @@ config = {
"mozmill": [],
"cppunittest": [],
"jittest": [],
"mozbase": [],
},
"download_minidump_stackwalk": True,
"minidump_stackwalk_path": MINIDUMP_STACKWALK_PATH,
@ -20,7 +20,6 @@ config = {
"cppunittest": "runcppunittests.py",
"gtest": "rungtests.py",
"jittest": "jit_test.py",
"mozbase": "test.py",
"mozmill": "runtestlist.py",
},
"minimum_tests_zip_dirs": [

@ -73,14 +72,6 @@ config = {
"run_filename": "runtests.py",
"testsdir": "mochitest"
},
"mozbase": {
"options": [
"-b",
"%(binary_path)s"
],
"run_filename": "test.py",
"testsdir": "mozbase"
},
"mozmill": {
"options": [
"--binary=%(binary_path)s",

@ -194,9 +185,6 @@ config = {
"jittest": [],
"jittest-chunked": []
},
"all_mozbase_suites": {
"mozbase": []
},
"run_cmd_checks_enabled": True,
"preflight_run_cmd_suites": [
# NOTE 'enabled' is only here while we have unconsolidated configs

@ -227,7 +215,6 @@ config = {
"mozmill": [],
"cppunittest": [],
"jittest": [],
"mozbase": [],
},
"download_minidump_stackwalk": True,
"minidump_stackwalk_path": "macosx64-minidump_stackwalk",
@ -41,7 +41,6 @@ config = {
"cppunittest": "runcppunittests.py",
"gtest": "rungtests.py",
"jittest": "jit_test.py",
"mozbase": "test.py",
"mozmill": "runtestlist.py",
},
"minimum_tests_zip_dirs": [

@ -95,14 +94,6 @@ config = {
"run_filename": "runtests.py",
"testsdir": "mochitest"
},
"mozbase": {
"options": [
"-b",
"%(binary_path)s"
],
"run_filename": "test.py",
"testsdir": "mozbase"
},
"mozmill": {
"options": [
"--binary=%(binary_path)s",

@ -228,9 +219,6 @@ config = {
"jittest": [],
"jittest-chunked": [],
},
"all_mozbase_suites": {
"mozbase": []
},
"run_cmd_checks_enabled": True,
"preflight_run_cmd_suites": [
{
@ -30,7 +30,6 @@ config = {
"cppunittest": "runcppunittests.py",
"gtest": "rungtests.py",
"jittest": "jit_test.py",
"mozbase": "test.py",
"mozmill": "runtestlist.py",
},
"minimum_tests_zip_dirs": [

@ -84,14 +83,6 @@ config = {
"run_filename": "runtests.py",
"testsdir": "mochitest"
},
"mozbase": {
"options": [
"-b",
"%(binary_path)s"
],
"run_filename": "test.py",
"testsdir": "mozbase"
},
"mozmill": {
"options": [
"--binary=%(binary_path)s",

@ -220,9 +211,6 @@ config = {
"all_jittest_suites": {
"jittest": []
},
"all_mozbase_suites": {
"mozbase": []
},
"run_cmd_checks_enabled": True,
"preflight_run_cmd_suites": [
# NOTE 'enabled' is only here while we have unconsolidated configs

@ -254,7 +242,6 @@ config = {
"mozmill": [],
"cppunittest": [],
"jittest": [],
"mozbase": [],
},
"download_minidump_stackwalk": True,
"minidump_stackwalk_path": "win32-minidump_stackwalk.exe",
@ -117,11 +117,6 @@ class MozharnessRunner(MozbuildObject):
"config": desktop_unittest_config + [
"--jittest-suite", "jittest"]
},
"mozbase": {
"script": "desktop_unittest.py",
"config": desktop_unittest_config + [
"--mozbase-suite", "mozbase"]
},
"marionette": {
"script": "marionette.py",
"config": ["--config-file", self.config_path("marionette",
@ -39,7 +39,7 @@ from mozharness.mozilla.testing.codecoverage import (
from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options

SUITE_CATEGORIES = ['gtest', 'cppunittest', 'jittest', 'mochitest', 'reftest', 'xpcshell',
'mozbase', 'mozmill']
'mozmill']
SUITE_DEFAULT_E10S = ['mochitest', 'reftest']
SUITE_NO_E10S = ['xpcshell']

@ -96,14 +96,6 @@ class DesktopUnittest(TestingMixin, MercurialScript, MozbaseMixin,
"Suites are defined in the config file\n."
"Examples: 'jittest'"}
],
[['--mozbase-suite', ], {
"action": "extend",
"dest": "specified_mozbase_suites",
"type": "string",
"help": "Specify which mozbase suite to run. "
"Suites are defined in the config file\n."
"Examples: 'mozbase'"}
],
[['--mozmill-suite', ], {
"action": "extend",
"dest": "specified_mozmill_suites",

@ -217,7 +209,6 @@ class DesktopUnittest(TestingMixin, MercurialScript, MozbaseMixin,
('specified_cppunittest_suites', 'cppunit'),
('specified_gtest_suites', 'gtest'),
('specified_jittest_suites', 'jittest'),
('specified_mozbase_suites', 'mozbase'),
('specified_mozmill_suites', 'mozmill'),
)
for s, prefix in suites:

@ -274,7 +265,6 @@ class DesktopUnittest(TestingMixin, MercurialScript, MozbaseMixin,
'blobber_upload_dir')
dirs['abs_jittest_dir'] = os.path.join(dirs['abs_test_install_dir'],
"jit-test", "jit-test")
dirs['abs_mozbase_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozbase")
dirs['abs_mozmill_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozmill")

if os.path.isabs(c['virtualenv_path']):
@ -30,7 +30,6 @@ class Output(object):

def summarize(self):
suites = []
vals = []
test_results = {
'framework': {
'name': 'raptor',

@ -44,6 +43,7 @@ class Output(object):
return

for test in self.results:
vals = []
subtests = []
suite = {
'name': test.name,

@ -71,10 +71,10 @@ class Output(object):
# u'https://www.amazon.com/s/url=search-alias%3Daps&field-keywords=laptop',
# u'unit': u'ms', u'alert_threshold': 2}

for key, values in test.measurements.iteritems():
for measurement_name, replicates in test.measurements.iteritems():
new_subtest = {}
new_subtest['name'] = test.name + "-" + key
new_subtest['replicates'] = values
new_subtest['name'] = test.name + "-" + measurement_name
new_subtest['replicates'] = replicates
new_subtest['lowerIsBetter'] = test.lower_is_better
new_subtest['alertThreshold'] = float(test.alert_threshold)
new_subtest['value'] = 0

@ -82,8 +82,8 @@ class Output(object):

filtered_values = filter.ignore_first(new_subtest['replicates'], 1)
new_subtest['value'] = filter.median(filtered_values)
vals.append(new_subtest['value'])

vals.append([new_subtest['value'], new_subtest['name']])
subtests.append(new_subtest)

elif test.type == "benchmark":

@ -97,14 +97,22 @@ class Output(object):
subtests, vals = self.parseWebaudioOutput(test)
suite['subtests'] = subtests

# if there is more than one subtest, calculate a summary result
if len(subtests) > 1:
suite['value'] = self.construct_summary(vals, testname=test.name)

else:
LOG.error("output.summarize received unsupported test results type")
return

# for pageload tests, if there are > 1 subtests here, that means there
# were multiple measurements captured in each single pageload; we want
# to get the mean of those values and report 1 overall 'suite' value
# for the page; so that each test page/URL only has 1 line output
# on treeherder/perfherder (all replicates available in the JSON)

# for benchmarks there is generally more than one subtest in each cycle
# and a benchmark-specific formula is needed to calculate the final score

if len(subtests) > 1:
suite['value'] = self.construct_summary(vals, testname=test.name)

self.summarized_results = test_results

def parseSpeedometerOutput(self, test):

@ -401,6 +409,6 @@ class Output(object):
elif testname.startswith('raptor-webaudio'):
return self.webaudio_score(vals)
elif len(vals) > 1:
return filter.geometric_mean([i for i, j in vals])
return round(filter.geometric_mean([i for i, j in vals]), 2)
else:
return filter.mean([i for i, j in vals])
return round(filter.mean([i for i, j in vals]), 2)
@ -20,34 +20,40 @@ page_timeout = 30000
apps = firefox
test_url = https://docs.google.com/document/d/1US-07msg12slQtI_xchzYxcKlTs6Fp7WqIc6W5GK5M8/edit?usp=sharing
playback_recordings = google-docs.mp
measure = fnbpaint
measure = fnbpaint, hero
hero = hero1

[raptor-google-sheets-firefox]
apps = firefox
test_url = https://docs.google.com/spreadsheets/d/1jT9qfZFAeqNoOK97gruc34Zb7y_Q-O_drZ8kSXT-4D4/edit?usp=sharing
playback_recordings = google-sheets.mp
measure = fnbpaint
measure = fnbpaint, hero
hero = hero1

[raptor-google-slides-firefox]
apps = firefox
test_url = https://docs.google.com/presentation/d/1Ici0ceWwpFvmIb3EmKeWSq_vAQdmmdFcWqaiLqUkJng/edit?usp=sharing
playback_recordings = google-slides.mp
measure = fnbpaint
measure = fnbpaint, hero
hero = hero1

[raptor-google-docs-chrome]
apps = chrome
test_url = https://docs.google.com/document/d/1US-07msg12slQtI_xchzYxcKlTs6Fp7WqIc6W5GK5M8/edit?usp=sharing
playback_recordings = google-docs.mp
measure = fcp
measure = fcp, hero
hero = hero1

[raptor-google-sheets-chrome]
apps = chrome
test_url = https://docs.google.com/spreadsheets/d/1jT9qfZFAeqNoOK97gruc34Zb7y_Q-O_drZ8kSXT-4D4/edit?usp=sharing
playback_recordings = google-sheets.mp
measure = fcp
measure = fcp, hero
hero = hero1

[raptor-google-slides-chrome]
apps = chrome
test_url = https://docs.google.com/presentation/d/1Ici0ceWwpFvmIb3EmKeWSq_vAQdmmdFcWqaiLqUkJng/edit?usp=sharing
playback_recordings = google-slides.mp
measure = fcp
measure = fcp, hero
hero = hero1
@ -19,13 +19,15 @@ alert_threshold = 2.0
apps = firefox
test_url = https://www.amazon.com/s/url=search-alias%3Daps&field-keywords=laptop
playback_recordings = amazon.mp
measure = fnbpaint
measure = fnbpaint, hero
hero = hero1

[raptor-tp6-facebook-firefox]
apps = firefox
test_url = https://www.facebook.com
playback_recordings = facebook.mp
measure = fnbpaint
measure = fnbpaint, hero
hero = hero1

[raptor-tp6-google-firefox]
apps = firefox

@ -34,34 +36,40 @@ apps = firefox
# to be loaded into that page also; resulting in 2 fnbpaint values etc.
test_url = https://www.google.com/search?hl=en&q=barack+obama&cad=h
playback_recordings = google-search.mp
measure = fnbpaint
measure = fnbpaint, hero
hero = hero1

[raptor-tp6-youtube-firefox]
apps = firefox
test_url = https://www.youtube.com
playback_recordings = youtube.mp
measure = fnbpaint
measure = fnbpaint, hero
hero = hero1

[raptor-tp6-amazon-chrome]
apps = chrome
test_url = https://www.amazon.com/s/url=search-alias%3Daps&field-keywords=laptop
playback_recordings = amazon.mp
measure = fcp
measure = fcp, hero
hero = hero1

[raptor-tp6-facebook-chrome]
apps = chrome
test_url = https://www.facebook.com
playback_recordings = facebook.mp
measure = fcp
measure = fcp, hero
hero = hero1

[raptor-tp6-google-chrome]
apps = chrome
test_url = https://www.google.com/#hl=en&q=barack+obama
playback_recordings = google-search.mp
measure = fcp
measure = fcp, hero
hero = hero1

[raptor-tp6-youtube-chrome]
apps = chrome
test_url = https://www.youtube.com
playback_recordings = youtube.mp
measure = fcp
measure = fcp, hero
hero = hero1
@ -268,8 +268,7 @@ BackgroundHangManager::BackgroundHangManager()

mHangMonitorThread = PR_CreateThread(
PR_USER_THREAD, MonitorThread, this,
PR_PRIORITY_LOW, PR_GLOBAL_THREAD, PR_JOINABLE_THREAD,
nsIThreadManager::DEFAULT_STACK_SIZE);
PR_PRIORITY_LOW, PR_GLOBAL_THREAD, PR_JOINABLE_THREAD, 0);

MOZ_ASSERT(mHangMonitorThread, "Failed to create BHR monitor thread");
@ -34,6 +34,8 @@ async function test_ntp_theme(theme, isBrightText) {

await extension.startup();

Services.ppmm.sharedData.flush();

await ContentTask.spawn(browser, {
isBrightText,
background: hexToCSS(theme.colors.ntp_background),

@ -53,6 +55,8 @@ async function test_ntp_theme(theme, isBrightText) {

await extension.unload();

Services.ppmm.sharedData.flush();

await ContentTask.spawn(browser, {
originalBackground,
originalColor,
@ -10,6 +10,7 @@
* @returns {Promise} The task as a promise
*/
function test_ntp_theme(browser, theme, isBrightText) {
Services.ppmm.sharedData.flush();
return ContentTask.spawn(browser, {
isBrightText,
background: hexToCSS(theme.colors.ntp_background),

@ -34,6 +35,7 @@ function test_ntp_theme(browser, theme, isBrightText) {
* @returns {Promise} The task as a promise
*/
function test_ntp_default_theme(browser) {
Services.ppmm.sharedData.flush();
return ContentTask.spawn(browser, {
background: hexToCSS("#F9F9FA"),
color: hexToCSS("#0C0C0D"),
@ -780,15 +780,12 @@ var Bookmarks = Object.freeze({
// If we're updating a tag, we must notify all the tagged bookmarks
// about the change.
if (isTagging) {
let URIs = PlacesUtils.tagging.getURIsForTag(updatedItem.title);
for (let uri of URIs) {
for (let entry of (await fetchBookmarksByURL({ url: new URL(uri.spec) }, true))) {
notify(observers, "onItemChanged", [ entry._id, "tags", false, "",
PlacesUtils.toPRTime(entry.lastModified),
entry.type, entry._parentId,
entry.guid, entry.parentGuid,
"", updatedItem.source ]);
}
for (let entry of (await fetchBookmarksByTags({ tags: [updatedItem.title] }, true))) {
notify(observers, "onItemChanged", [ entry._id, "tags", false, "",
PlacesUtils.toPRTime(entry.lastModified),
entry.type, entry._parentId,
entry.guid, entry.parentGuid,
"", updatedItem.source ]);
}
}
}

@ -1200,6 +1197,13 @@ var Bookmarks = Object.freeze({
* retrieves the most recent item with the specified guid prefix.
* To retrieve ALL of the bookmarks for that guid prefix, you must pass
* in an onResult callback, that will be invoked once for each bookmark.
* - tags
*   Retrieves the most recent item with all the specified tags.
*   The tags are matched in a case-insensitive way.
*   To retrieve ALL of the bookmarks having these tags, pass in an
*   onResult callback, that will be invoked once for each bookmark.
*   Note, there can be multiple bookmarks for the same url, if you need
*   unique tagged urls you can filter duplicates by accumulating in a Set.
*
* @param guidOrInfo
*        The globally unique identifier of the item to fetch, or an

@ -1235,15 +1239,18 @@ var Bookmarks = Object.freeze({
info = { guid: guidOrInfo };
} else if (Object.keys(info).length == 1) {
// Just a faster code path.
if (!["url", "guid", "parentGuid", "index", "guidPrefix"].includes(Object.keys(info)[0]))
if (!["url", "guid", "parentGuid",
"index", "guidPrefix", "tags"].includes(Object.keys(info)[0])) {
throw new Error(`Unexpected number of conditions provided: 0`);
}
} else {
// Only one condition at a time can be provided.
let conditionsCount = [
v => v.hasOwnProperty("guid"),
v => v.hasOwnProperty("parentGuid") && v.hasOwnProperty("index"),
v => v.hasOwnProperty("url"),
v => v.hasOwnProperty("guidPrefix")
v => v.hasOwnProperty("guidPrefix"),
v => v.hasOwnProperty("tags")
].reduce((old, fn) => old + fn(info) | 0, 0);
if (conditionsCount != 1)
throw new Error(`Unexpected number of conditions provided: ${conditionsCount}`);

@ -1274,6 +1281,9 @@ var Bookmarks = Object.freeze({
results = await fetchBookmarkByPosition(fetchInfo, options && options.concurrent);
else if (fetchInfo.hasOwnProperty("guidPrefix"))
results = await fetchBookmarksByGUIDPrefix(fetchInfo, options && options.concurrent);
else if (fetchInfo.hasOwnProperty("tags")) {
results = await fetchBookmarksByTags(fetchInfo, options && options.concurrent);
}

if (!results)
return null;
@ -2054,6 +2064,42 @@ async function fetchBookmarkByPosition(info, concurrent) {
query);
}

async function fetchBookmarksByTags(info, concurrent) {
let query = async function(db) {
let rows = await db.executeCached(
`SELECT b.guid, IFNULL(p.guid, "") AS parentGuid, b.position AS 'index',
b.dateAdded, b.lastModified, b.type, IFNULL(b.title, "") AS title,
h.url AS url, b.id AS _id, b.parent AS _parentId,
NULL AS _childCount,
p.parent AS _grandParentId, b.syncStatus AS _syncStatus
FROM moz_bookmarks b
JOIN moz_bookmarks p ON p.id = b.parent
JOIN moz_bookmarks g ON g.id = p.parent
JOIN moz_places h ON h.id = b.fk
WHERE g.guid <> ? AND b.fk IN (
SELECT b2.fk FROM moz_bookmarks b2
JOIN moz_bookmarks p2 ON p2.id = b2.parent
JOIN moz_bookmarks g2 ON g2.id = p2.parent
WHERE g2.guid = ?
AND lower(p2.title) IN (
${new Array(info.tags.length).fill("?").join(",")}
)
GROUP BY b2.fk HAVING count(*) = ${info.tags.length}
)
ORDER BY b.lastModified DESC
`, [Bookmarks.tagsGuid, Bookmarks.tagsGuid].concat(info.tags.map(t => t.toLowerCase())));

return rows.length ? rowsToItemsArray(rows) : null;
};

if (concurrent) {
let db = await PlacesUtils.promiseDBConnection();
return query(db);
}
return PlacesUtils.withConnectionWrapper("Bookmarks.jsm: fetchBookmarksByTags",
query);
}

async function fetchBookmarksByGUIDPrefix(info, concurrent) {
let query = async function(db) {
let rows = await db.executeCached(
@ -1597,8 +1597,10 @@ PT.RenameTag.prototype = {
// For now this is implemented by untagging and tagging all the bookmarks.
// We should create a specialized bookmarking API to just rename the tag.
let onUndo = [], onRedo = [];
let urls = PlacesUtils.tagging.getURIsForTag(oldTag);
if (urls.length > 0) {
let urls = new Set();
await PlacesUtils.bookmarks.fetch({tags: [oldTag]}, b => urls.add(b.url));
if (urls.size > 0) {
urls = Array.from(urls);
let tagTxn = TransactionsHistory.getRawTransaction(
PT.Tag({ urls, tags: [tag] })
);
@ -252,7 +252,8 @@ const BOOKMARK_VALIDATORS = Object.freeze({
keyword: simpleValidateFunc(v => (typeof(v) == "string") && v.length),
charset: simpleValidateFunc(v => (typeof(v) == "string") && v.length),
postData: simpleValidateFunc(v => (typeof(v) == "string") && v.length),
tags: simpleValidateFunc(v => Array.isArray(v) && v.length),
tags: simpleValidateFunc(v => Array.isArray(v) && v.length &&
v.every(item => item && typeof item == "string")),
});

// Sync bookmark records can contain additional properties.
@ -1628,19 +1628,13 @@ class SyncedBookmarksMirror {
WITH RECURSIVE
${LocalItemsSQLFragment}
INSERT INTO itemsToUpload(id, guid, syncChangeCounter, parentGuid,
parentTitle, dateAdded, type, title, isQuery,
url, tags, keyword, feedURL, siteURL,
parentTitle, dateAdded, type, title, placeId,
isQuery, url, keyword, feedURL, siteURL,
position, tagFolderName)
SELECT s.id, s.guid, s.syncChangeCounter, s.parentGuid, s.parentTitle,
s.dateAdded / 1000, s.type, s.title,
s.dateAdded / 1000, s.type, s.title, s.placeId,
IFNULL(SUBSTR(h.url, 1, 6) = 'place:', 0) AS isQuery,
h.url,
(SELECT GROUP_CONCAT(t.title, ',') FROM moz_bookmarks e
JOIN moz_bookmarks t ON t.id = e.parent
JOIN moz_bookmarks r ON r.id = t.parent
WHERE s.type = :bookmarkType AND
r.guid = :tagsGuid AND
e.fk = h.id),
(SELECT keyword FROM moz_keywords WHERE place_id = h.id),
(SELECT a.content FROM moz_items_annos a
JOIN moz_anno_attributes n ON n.id = a.anno_attribute_id

@ -1660,9 +1654,7 @@ class SyncedBookmarksMirror {
LEFT JOIN idsToWeaklyUpload w ON w.id = s.id
WHERE s.syncChangeCounter >= 1 OR
w.id NOT NULL`,
{ bookmarkType: PlacesUtils.bookmarks.TYPE_BOOKMARK,
tagsGuid: PlacesUtils.bookmarks.tagsGuid,
folderType: PlacesUtils.bookmarks.TYPE_FOLDER,
{ folderType: PlacesUtils.bookmarks.TYPE_FOLDER,
feedURLAnno: PlacesUtils.LMANNO_FEEDURI,
siteURLAnno: PlacesUtils.LMANNO_SITEURI });

@ -1673,6 +1665,13 @@ class SyncedBookmarksMirror {
SELECT b.guid, b.parent, b.position FROM moz_bookmarks b
JOIN itemsToUpload o ON o.id = b.parent`);

// Stage tags for outgoing bookmarks.
await this.db.execute(`
INSERT INTO tagsToUpload(id, tag)
SELECT o.id, t.tag
FROM localTags t
JOIN itemsToUpload o ON o.placeId = t.placeId`);

// Finally, stage tombstones for deleted items. Ignore conflicts if we have
// tombstones for undeleted items; Places Maintenance should clean these up.
await this.db.execute(`
@ -1690,6 +1689,7 @@ class SyncedBookmarksMirror {
async fetchLocalChangeRecords() {
let changeRecords = {};
let childRecordIdsByLocalParentId = new Map();
let tagsByLocalId = new Map();

let childGuidRows = await this.db.execute(`
SELECT parentId, guid FROM structureToUpload

@ -1699,17 +1699,31 @@ class SyncedBookmarksMirror {
let localParentId = row.getResultByName("parentId");
let childRecordId = PlacesSyncUtils.bookmarks.guidToRecordId(
row.getResultByName("guid"));
if (childRecordIdsByLocalParentId.has(localParentId)) {
let childRecordIds = childRecordIdsByLocalParentId.get(localParentId);
let childRecordIds = childRecordIdsByLocalParentId.get(localParentId);
if (childRecordIds) {
childRecordIds.push(childRecordId);
} else {
childRecordIdsByLocalParentId.set(localParentId, [childRecordId]);
}
}

let tagRows = await this.db.execute(`
SELECT id, tag FROM tagsToUpload`);

for await (let row of yieldingIterator(tagRows)) {
let localId = row.getResultByName("id");
let tag = row.getResultByName("tag");
let tags = tagsByLocalId.get(localId);
if (tags) {
tags.push(tag);
} else {
tagsByLocalId.set(localId, [tag]);
}
}

let itemRows = await this.db.execute(`
SELECT id, syncChangeCounter, guid, isDeleted, type, isQuery,
tagFolderName, keyword, tags, url, IFNULL(title, "") AS title,
tagFolderName, keyword, url, IFNULL(title, "") AS title,
feedURL, siteURL, position, parentGuid,
IFNULL(parentTitle, "") AS parentTitle, dateAdded
FROM itemsToUpload`);

@ -1779,9 +1793,10 @@ class SyncedBookmarksMirror {
if (keyword) {
bookmarkCleartext.keyword = keyword;
}
let tags = row.getResultByName("tags");
let localId = row.getResultByName("id");
let tags = tagsByLocalId.get(localId);
if (tags) {
bookmarkCleartext.tags = tags.split(",");
bookmarkCleartext.tags = tags;
}
changeRecords[recordId] = new BookmarkChangeRecord(
syncChangeCounter, bookmarkCleartext);
@ -2238,18 +2253,15 @@ async function initializeTempMirrorEntities(db) {
"v.dateAdded" is in milliseconds. */
(CASE WHEN b.dateAdded / 1000 < v.dateAdded THEN b.dateAdded
ELSE v.dateAdded * 1000 END),
v.title, h.id, u.newPlaceId, v.keyword,
v.feedURL, v.siteURL
v.title, h.id, (SELECT n.id FROM moz_places n
WHERE n.url_hash = u.hash AND
n.url = u.url),
v.keyword, v.feedURL, v.siteURL
FROM items v
JOIN mergeStates r ON r.mergedGuid = v.guid
LEFT JOIN moz_bookmarks b ON b.guid = r.localGuid
LEFT JOIN moz_places h ON h.id = b.fk
LEFT JOIN (
SELECT h.id AS newPlaceId, u.id AS urlId
FROM urls u
JOIN moz_places h ON h.url_hash = u.hash AND
h.url = u.url
) u ON u.urlId = v.urlId
LEFT JOIN urls u ON u.id = v.urlId
WHERE r.mergedGuid <> '${PlacesUtils.bookmarks.rootGuid}'`);

// Changes local GUIDs to remote GUIDs, drops local tombstones for revived

@ -2720,9 +2732,9 @@ async function initializeTempMirrorEntities(db) {
dateAdded INTEGER, /* In milliseconds. */
type INTEGER,
title TEXT,
placeId INTEGER,
isQuery BOOLEAN NOT NULL DEFAULT 0,
url TEXT,
tags TEXT,
tagFolderName TEXT,
keyword TEXT,
feedURL TEXT,

@ -2736,6 +2748,13 @@ async function initializeTempMirrorEntities(db) {
ON DELETE CASCADE,
position INTEGER NOT NULL
) WITHOUT ROWID`);

await db.execute(`CREATE TEMP TABLE tagsToUpload(
id INTEGER REFERENCES itemsToUpload(id)
ON DELETE CASCADE,
tag TEXT,
PRIMARY KEY(id, tag)
) WITHOUT ROWID`);
}

async function resetMirror(db) {
@ -48,15 +48,6 @@ interface nsITaggingService : nsISupports
in nsIVariant aTags,
[optional] in unsigned short aSource);

/**
* Retrieves all URLs tagged with the given tag.
*
* @param aTag
*        tag name
* @returns Array of uris tagged with aTag.
*/
nsIVariant getURIsForTag(in AString aTag);

/**
* Retrieves all tags set for the given URL.
*
@ -229,42 +229,6 @@ TaggingService.prototype = {
}
},

// nsITaggingService
getURIsForTag: function TS_getURIsForTag(aTagName) {
if (!aTagName || aTagName.length == 0) {
throw Components.Exception("Invalid tag name", Cr.NS_ERROR_INVALID_ARG);
}

if (/^\s|\s$/.test(aTagName)) {
throw Components.Exception("Tag passed to getURIsForTag was not trimmed",
Cr.NS_ERROR_INVALID_ARG);
}

let uris = [];
let tagId = this._getItemIdForTag(aTagName);
if (tagId == -1)
return uris;

let db = PlacesUtils.history.DBConnection;
let stmt = db.createStatement(
`SELECT h.url FROM moz_places h
JOIN moz_bookmarks b ON b.fk = h.id
WHERE b.parent = :tag_id`
);
stmt.params.tag_id = tagId;
try {
while (stmt.executeStep()) {
try {
uris.push(Services.io.newURI(stmt.row.url));
} catch (ex) {}
}
} finally {
stmt.finalize();
}

return uris;
},

// nsITaggingService
getTagsForURI: function TS_getTagsForURI(aURI, aCount) {
if (!aURI) {
@ -1,7 +1,7 @@
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */

add_task(async function() {
add_task(async function test_fetchTags() {
let tags = await PlacesUtils.bookmarks.fetchTags();
Assert.deepEqual(tags, []);

@ -34,3 +34,55 @@ add_task(async function() {
{ name: "3", count: 1 },
]);
});

add_task(async function test_fetch_by_tags() {
Assert.throws(() => PlacesUtils.bookmarks.fetch({ tags: "" }),
/Invalid value for property 'tags'/);
Assert.throws(() => PlacesUtils.bookmarks.fetch({ tags: [] }),
/Invalid value for property 'tags'/);
Assert.throws(() => PlacesUtils.bookmarks.fetch({ tags: null }),
/Invalid value for property 'tags'/);
Assert.throws(() => PlacesUtils.bookmarks.fetch({ tags: [""] }),
/Invalid value for property 'tags'/);
Assert.throws(() => PlacesUtils.bookmarks.fetch({ tags: ["valid", null] }),
/Invalid value for property 'tags'/);

info("Add bookmarks with tags.");
let bm1 = await PlacesUtils.bookmarks.insert({
url: "http://bacon.org/",
parentGuid: PlacesUtils.bookmarks.unfiledGuid,
});
PlacesUtils.tagging.tagURI(Services.io.newURI(bm1.url.href), ["egg", "ratafià"]);
let bm2 = await PlacesUtils.bookmarks.insert({
url: "http://mushroom.org/",
parentGuid: PlacesUtils.bookmarks.unfiledGuid,
});
PlacesUtils.tagging.tagURI(Services.io.newURI(bm2.url.href), ["egg"]);

info("Fetch a single tag.");
let bms = [];
Assert.equal((await PlacesUtils.bookmarks.fetch({tags: ["egg"]}, b => bms.push(b))).guid,
bm2.guid, "Found the expected recent bookmark");
Assert.deepEqual(bms.map(b => b.guid), [bm2.guid, bm1.guid],
"Found the expected bookmarks");

info("Fetch multiple tags.");
bms = [];
Assert.equal((await PlacesUtils.bookmarks.fetch({tags: ["egg", "ratafià"]}, b => bms.push(b))).guid,
bm1.guid, "Found the expected recent bookmark");
Assert.deepEqual(bms.map(b => b.guid), [bm1.guid],
"Found the expected bookmarks");

info("Fetch a nonexisting tag.");
bms = [];
Assert.equal(await PlacesUtils.bookmarks.fetch({tags: ["egg", "tomato"]}, b => bms.push(b)),
null, "Should not find any bookmark");
Assert.deepEqual(bms, [], "Should not find any bookmark");

info("Check case insensitive");
bms = [];
Assert.equal((await PlacesUtils.bookmarks.fetch({tags: ["eGg", "raTafiÀ"]}, b => bms.push(b))).guid,
bm1.guid, "Found the expected recent bookmark");
Assert.deepEqual(bms.map(b => b.guid), [bm1.guid],
"Found the expected bookmarks");
});
@ -961,12 +961,12 @@ add_task(async function test_rewrite_tag_queries() {

deepEqual(changesToUpload, {}, "Should not reupload any local records");

let urisWithTaggy = PlacesUtils.tagging.getURIsForTag("taggy");
deepEqual(urisWithTaggy.map(uri => uri.spec).sort(), ["http://example.com/e"],
let bmWithTaggy = await PlacesUtils.bookmarks.fetch({tags: ["taggy"]});
equal(bmWithTaggy.url.href, "http://example.com/e",
"Should insert bookmark with new tag");

let urisWithKitty = PlacesUtils.tagging.getURIsForTag("kitty");
deepEqual(urisWithKitty.map(uri => uri.spec).sort(), ["http://example.com/d"],
let bmWithKitty = await PlacesUtils.bookmarks.fetch({tags: ["kitty"]});
equal(bmWithKitty.url.href, "http://example.com/d",
"Should retain existing tag");

let { root: toolbarContainer } = PlacesUtils.getFolderContents(
@ -1098,4 +1098,185 @@ add_task(async function test_date_added() {
equal(bInfo.title, "B (remote)", "Should change local title for B");
deepEqual(bInfo.dateAdded, bNewDateAdded,
"Should take older date added for B");

await buf.finalize();
await PlacesUtils.bookmarks.eraseEverything();
await PlacesSyncUtils.bookmarks.reset();
});

// Bug 1472435.
add_task(async function test_duplicate_url_rows() {
let buf = await openMirror("test_duplicate_url_rows");

let placesToInsert = [{
guid: "placeAAAAAAA",
href: "http://example.com",
}, {
guid: "placeBBBBBBB",
href: "http://example.com",
}, {
guid: "placeCCCCCCC",
href: "http://example.com/c",
}];

let itemsToInsert = [{
guid: "bookmarkAAAA",
parentGuid: PlacesUtils.bookmarks.menuGuid,
placeGuid: "placeAAAAAAA",
localTitle: "A",
remoteTitle: "A (remote)",
}, {
guid: "bookmarkBBBB",
parentGuid: PlacesUtils.bookmarks.toolbarGuid,
placeGuid: "placeBBBBBBB",
localTitle: "B",
remoteTitle: "B (remote)",
}, {
guid: "bookmarkCCCC",
parentGuid: PlacesUtils.bookmarks.unfiledGuid,
placeGuid: "placeCCCCCCC",
localTitle: "C",
remoteTitle: "C (remote)",
}];

info("Manually insert local and remote items with duplicate URLs");
await buf.db.executeTransaction(async function() {
for (let { guid, href } of placesToInsert) {
let url = new URL(href);
await buf.db.executeCached(`
INSERT INTO moz_places(url, url_hash, rev_host, hidden, frecency, guid)
VALUES(:url, hash(:url), :revHost, 0, -1, :guid)`,
{ url: url.href, revHost: PlacesUtils.getReversedHost(url), guid });

await buf.db.executeCached(`
INSERT INTO urls(guid, url, hash, revHost)
VALUES(:guid, :url, hash(:url), :revHost)`,
{ guid, url: url.href, revHost: PlacesUtils.getReversedHost(url) });
}

for (let { guid, parentGuid, placeGuid, localTitle, remoteTitle } of itemsToInsert) {
await buf.db.executeCached(`
INSERT INTO moz_bookmarks(guid, parent, fk, position, type, title,
syncStatus, syncChangeCounter)
VALUES(:guid, (SELECT id FROM moz_bookmarks WHERE guid = :parentGuid),
(SELECT id FROM moz_places WHERE guid = :placeGuid),
(SELECT count(*) FROM moz_bookmarks b
JOIN moz_bookmarks p ON p.id = b.parent
WHERE p.guid = :parentGuid), :type, :localTitle,
:syncStatus, 1)`,
{ guid, parentGuid, placeGuid,
type: PlacesUtils.bookmarks.TYPE_BOOKMARK, localTitle,
syncStatus: PlacesUtils.bookmarks.SYNC_STATUS.NEW });

await buf.db.executeCached(`
INSERT INTO items(guid, needsMerge, kind, title, urlId)
VALUES(:guid, 1, :kind, :remoteTitle,
(SELECT id FROM urls WHERE guid = :placeGuid))`,
{ guid, placeGuid, kind: SyncedBookmarksMirror.KIND.BOOKMARK,
remoteTitle });

await buf.db.executeCached(`
INSERT INTO structure(guid, parentGuid, position)
VALUES(:guid, :parentGuid,
IFNULL((SELECT count(*) FROM structure
WHERE parentGuid = :parentGuid), 0))`,
{ guid, parentGuid });
}
});

info("Apply mirror");
let observer = expectBookmarkChangeNotifications();
await buf.apply();
deepEqual(await buf.fetchUnmergedGuids(), [], "Should merge all items");

await assertLocalTree(PlacesUtils.bookmarks.rootGuid, {
guid: PlacesUtils.bookmarks.rootGuid,
type: PlacesUtils.bookmarks.TYPE_FOLDER,
index: 0,
title: "",
children: [{
guid: PlacesUtils.bookmarks.menuGuid,
type: PlacesUtils.bookmarks.TYPE_FOLDER,
index: 0,
title: BookmarksMenuTitle,
children: [{
guid: "bookmarkAAAA",
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
index: 0,
title: "A (remote)",
url: "http://example.com/",
}],
}, {
guid: PlacesUtils.bookmarks.toolbarGuid,
type: PlacesUtils.bookmarks.TYPE_FOLDER,
index: 1,
title: BookmarksToolbarTitle,
children: [{
guid: "bookmarkBBBB",
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
index: 0,
title: "B (remote)",
url: "http://example.com/",
}],
}, {
guid: PlacesUtils.bookmarks.unfiledGuid,
type: PlacesUtils.bookmarks.TYPE_FOLDER,
index: 3,
title: UnfiledBookmarksTitle,
children: [{
guid: "bookmarkCCCC",
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
index: 0,
title: "C (remote)",
url: "http://example.com/c",
}],
}, {
guid: PlacesUtils.bookmarks.mobileGuid,
type: PlacesUtils.bookmarks.TYPE_FOLDER,
index: 4,
title: MobileBookmarksTitle,
}],
}, "Should update titles for items with duplicate URLs");

let localItemIds = await PlacesUtils.promiseManyItemIds(["bookmarkAAAA",
"bookmarkBBBB", "bookmarkCCCC"]);
observer.check([{
name: "onItemChanged",
params: { itemId: localItemIds.get("bookmarkAAAA"), property: "title",
isAnnoProperty: false, newValue: "A (remote)",
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentId: PlacesUtils.bookmarksMenuFolderId, guid: "bookmarkAAAA",
parentGuid: PlacesUtils.bookmarks.menuGuid, oldValue: "A",
source: PlacesUtils.bookmarks.SOURCES.SYNC },
}, {
name: "onItemChanged",
params: { itemId: localItemIds.get("bookmarkBBBB"), property: "title",
isAnnoProperty: false, newValue: "B (remote)",
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentId: PlacesUtils.toolbarFolderId, guid: "bookmarkBBBB",
parentGuid: PlacesUtils.bookmarks.toolbarGuid, oldValue: "B",
source: PlacesUtils.bookmarks.SOURCES.SYNC },
}, {
name: "onItemChanged",
params: { itemId: localItemIds.get("bookmarkCCCC"), property: "title",
isAnnoProperty: false, newValue: "C (remote)",
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentId: PlacesUtils.unfiledBookmarksFolderId,
guid: "bookmarkCCCC",
parentGuid: PlacesUtils.bookmarks.unfiledGuid, oldValue: "C",
source: PlacesUtils.bookmarks.SOURCES.SYNC },
}]);

info("Remove duplicate URLs from Places to avoid tripping debug asserts");
await buf.db.executeTransaction(async function() {
for (let { guid } of placesToInsert) {
await buf.db.executeCached(`
DELETE FROM moz_places WHERE guid = :guid`,
{ guid });
}
});

await buf.finalize();
await PlacesUtils.bookmarks.eraseEverything();
await PlacesSyncUtils.bookmarks.reset();
});
@ -37,9 +37,11 @@ function shuffle(array) {
return results;
}

function assertTagForURLs(tag, urls, message) {
let taggedURLs = PlacesUtils.tagging.getURIsForTag(tag).map(uri => uri.spec);
deepEqual(taggedURLs.sort(compareAscending), urls.sort(compareAscending), message);
async function assertTagForURLs(tag, urls, message) {
let taggedURLs = new Set();
await PlacesUtils.bookmarks.fetch({tags: [tag]}, b => taggedURLs.add(b.url.href));
deepEqual(Array.from(taggedURLs).sort(compareAscending),
urls.sort(compareAscending), message);
}

function assertURLHasTags(url, tags, message) {

@ -581,7 +583,7 @@ add_task(async function test_update_tags() {
deepEqual(updatedItem.tags, ["foo", "baz"], "Should return updated tags");
assertURLHasTags("https://mozilla.org", ["baz", "foo"],
"Should update tags for URL");
assertTagForURLs("bar", [], "Should remove existing tag");
await assertTagForURLs("bar", [], "Should remove existing tag");
}

info("Tags with whitespace");

@ -666,7 +668,7 @@ add_task(async function test_pullChanges_tags() {
deepEqual(Object.keys(changes).sort(),
[firstItem.recordId, secondItem.recordId, taggedItem.recordId].sort(),
"Should include tagged bookmarks after changing case");
assertTagForURLs("TaGgY", ["https://example.org/", "https://mozilla.org/"],
await assertTagForURLs("TaGgY", ["https://example.org/", "https://mozilla.org/"],
"Should add tag for new URL");
await setChangesSynced(changes);
}

@ -715,17 +717,17 @@ add_task(async function test_pullChanges_tags() {
deepEqual(Object.keys(changes).sort(),
[firstItem.recordId, secondItem.recordId, untaggedItem.recordId].sort(),
"Should include tagged bookmarks after changing tag entry URI");
assertTagForURLs("tricky", ["https://bugzilla.org/", "https://mozilla.org/"],
await assertTagForURLs("tricky", ["https://bugzilla.org/", "https://mozilla.org/"],
"Should remove tag entry for old URI");
await setChangesSynced(changes);

bm.url = "https://example.com/";
bm.url = "https://example.org/";
await PlacesUtils.bookmarks.update(bm);
changes = await PlacesSyncUtils.bookmarks.pullChanges();
deepEqual(Object.keys(changes).sort(),
[untaggedItem.recordId].sort(),
[firstItem.recordId, secondItem.recordId, untaggedItem.recordId].sort(),
"Should include tagged bookmarks after changing tag entry URL");
assertTagForURLs("tricky", ["https://example.com/", "https://mozilla.org/"],
await assertTagForURLs("tricky", ["https://example.org/", "https://mozilla.org/"],
"Should remove tag entry for old URL");
await setChangesSynced(changes);
}
@ -1313,13 +1315,13 @@ add_task(async function test_insert_tags() {
|
|||
title: "bar",
|
||||
}].map(info => PlacesSyncUtils.bookmarks.insert(info)));
|
||||
|
||||
assertTagForURLs("foo", ["https://example.com/", "https://example.org/"],
|
||||
await assertTagForURLs("foo", ["https://example.com/", "https://example.org/"],
|
||||
"2 URLs with new tag");
|
||||
assertTagForURLs("bar", ["https://example.com/"], "1 URL with existing tag");
|
||||
assertTagForURLs("baz", ["https://example.org/",
|
||||
await assertTagForURLs("bar", ["https://example.com/"], "1 URL with existing tag");
|
||||
await assertTagForURLs("baz", ["https://example.org/",
|
||||
"place:queryType=1&sort=12&maxResults=10"],
|
||||
"Should support tagging URLs and tag queries");
|
||||
assertTagForURLs("qux", ["place:queryType=1&sort=12&maxResults=10"],
|
||||
await assertTagForURLs("qux", ["place:queryType=1&sort=12&maxResults=10"],
|
||||
"Should support tagging tag queries");
|
||||
|
||||
await PlacesUtils.bookmarks.eraseEverything();
|
||||
|
@ -1353,7 +1355,7 @@ add_task(async function test_insert_tags_whitespace() {
|
|||
assertURLHasTags("https://example.net/", ["taggy"],
|
||||
"Should ignore dupes when setting tags");
|
||||
|
||||
assertTagForURLs("taggy", ["https://example.net/", "https://example.org/"],
|
||||
await assertTagForURLs("taggy", ["https://example.net/", "https://example.org/"],
|
||||
"Should exclude falsy tags");
|
||||
|
||||
PlacesUtils.tagging.untagURI(uri("https://example.org"), ["untrimmed", "taggy"]);
|
||||
|
|
|
@@ -58,12 +58,6 @@ function run_test() {
Assert.equal(uri2tags.length, 1);
Assert.equal(uri2tags[0], "Tag 1");

// test getURIsForTag
var tag1uris = tagssvc.getURIsForTag("tag 1");
Assert.equal(tag1uris.length, 2);
Assert.ok(tag1uris[0].equals(uri1));
Assert.ok(tag1uris[1].equals(uri2));

// test untagging
tagssvc.untagURI(uri1, ["tag 1"]);
Assert.equal(tag1node.childCount, 1);

@@ -447,7 +447,7 @@ devtools.main:
width: Toolbox width rounded up to the nearest 50px.
session_id: The start time of the session in milliseconds since epoch (Unix Timestamp) e.g. 1396381378123.
enter:
objects: ["webconsole", "inspector", "jsdebugger", "styleeditor", "netmonitor", "storage", "other"]
objects: ["accessibility", "application", "canvasdebugger", "dom", "inspector", "jsdebugger", "memory", "netmonitor", "options", "performance", "scratchpad", "shadereditor", "storage", "styleeditor", "webaudioeditor", "webconsole", "other", "fakeTool4242", "testBlankPanel", "testTool", "testtool1", "testTool1072208", "testtool2"]
bug_numbers: [1441070]
notification_emails: ["dev-developer-tools@lists.mozilla.org", "hkirschner@mozilla.com"]
record_in_processes: ["main"]

@@ -459,11 +459,11 @@ devtools.main:
width: Toolbox width rounded up to the nearest 50px.
message_count: The number of cached console messages.
start_state: debuggerStatement, breakpoint, exception, tab_switch, toolbox_show, initial_panel, toggle_settings_off, toggle_settings_on, key_shortcut, select_next_key, select_prev_key, tool_unloaded, inspect_dom, unknown etc.
panel_name: The name of the panel opened, webconsole, inspector, jsdebugger, styleeditor, netmonitor, storage or other
panel_name: The name of the panel opened or other
cold: Is this the first time the current panel has been opened in this toolbox?
session_id: The start time of the session in milliseconds since epoch (Unix Timestamp) e.g. 1396381378123.
exit:
objects: ["webconsole", "inspector", "jsdebugger", "styleeditor", "netmonitor", "storage", "other"]
objects: ["accessibility", "application", "canvasdebugger", "dom", "inspector", "jsdebugger", "memory", "netmonitor", "options", "performance", "scratchpad", "shadereditor", "storage", "styleeditor", "webaudioeditor", "webconsole", "other", "fakeTool4242", "testBlankPanel", "testTool", "testtool1", "testTool1072208", "testtool2"]
bug_numbers: [1455270]
notification_emails: ["dev-developer-tools@lists.mozilla.org", "hkirschner@mozilla.com"]
record_in_processes: ["main"]

@@ -473,8 +473,8 @@ devtools.main:
extra_keys:
host: "Toolbox host (positioning): bottom, side, window or other."
width: Toolbox width rounded up to the nearest 50px.
next_panel: The name of the panel closed, webconsole, inspector, jsdebugger, styleeditor, netmonitor, storage or other.
panel_name: The name of the panel opened, webconsole, inspector, jsdebugger, styleeditor, netmonitor, storage or other
next_panel: The name of the panel closed or other.
panel_name: The name of the panel opened or other
reason: debuggerStatement, breakpoint, exception, tab_switch, toolbox_show, initial_panel, toggle_settings_off, toggle_settings_on, key_shortcut, select_next_key, select_prev_key, tool_unloaded, inspect_dom, toolbox_closed, unknown etc.
session_id: The start time of the session in milliseconds since epoch (Unix Timestamp) e.g. 1396381378123.
activate:

@@ -642,4 +642,14 @@ devtools.main:
trigger: "The cause of the filter change: error, warn, log, info, debug, css, netxhr, net, text or reset"
active: Comma separated list of active filters.
inactive: Comma separated list of inactive filters.
session_id: The start time of the session in milliseconds since epoch (Unix Timestamp) e.g. 1396381378123.
jump_to_definition:
objects: ["webconsole"]
bug_numbers: [1463101]
notification_emails: ["dev-developer-tools@lists.mozilla.org", "hkirschner@mozilla.com"]
record_in_processes: ["main"]
description: User has clicked "Jump to definition" icon (next to logged functions) in the web console.
release_channel_collection: opt-out
expiry_version: never
extra_keys:
session_id: The start time of the session in milliseconds since epoch (Unix Timestamp) e.g. 1396381378123.

@@ -110,6 +110,7 @@ ChromeUtils.defineModuleGetter(this, "LightweightThemeImageOptimizer",
function LightweightThemeConsumer(aDocument) {
this._doc = aDocument;
this._win = aDocument.defaultView;
this._winId = this._win.windowUtils.outerWindowID;

Services.obs.addObserver(this, "lightweight-theme-styling-update");

@@ -119,8 +120,6 @@ function LightweightThemeConsumer(aDocument) {

this._win.addEventListener("resolutionchange", this);
this._win.addEventListener("unload", this, { once: true });
this._win.addEventListener("EndSwapDocShells", this, true);
this._win.messageManager.addMessageListener("LightweightTheme:Request", this);

let darkThemeMediaQuery = this._win.matchMedia("(-moz-system-dark-theme)");
darkThemeMediaQuery.addListener(temp.LightweightThemeManager);

@@ -136,27 +135,18 @@ LightweightThemeConsumer.prototype = {
if (aTopic != "lightweight-theme-styling-update")
return;

const { outerWindowID } = this._win.windowUtils;

let parsedData = JSON.parse(aData);
if (!parsedData) {
parsedData = { theme: null };
}

if (parsedData.window && parsedData.window !== outerWindowID) {
if (parsedData.window && parsedData.window !== this._winId) {
return;
}

this._update(parsedData.theme);
},

receiveMessage({ name, target }) {
if (name == "LightweightTheme:Request") {
let contentThemeData = _getContentProperties(this._doc, this._active, this._lastData);
target.messageManager.sendAsyncMessage("LightweightTheme:Update", contentThemeData);
}
},

handleEvent(aEvent) {
switch (aEvent.type) {
case "resolutionchange":

@@ -166,14 +156,10 @@ LightweightThemeConsumer.prototype = {
break;
case "unload":
Services.obs.removeObserver(this, "lightweight-theme-styling-update");
Services.ppmm.sharedData.delete(`theme/${this._winId}`);
this._win.removeEventListener("resolutionchange", this);
this._win.removeEventListener("EndSwapDocShells", this, true);
this._win = this._doc = null;
break;
case "EndSwapDocShells":
let contentThemeData = _getContentProperties(this._doc, this._active, this._lastData);
aEvent.target.messageManager.sendAsyncMessage("LightweightTheme:Update", contentThemeData);
break;
}
},

@@ -227,11 +213,7 @@ LightweightThemeConsumer.prototype = {
root.removeAttribute("lwthemefooter");

let contentThemeData = _getContentProperties(this._doc, active, aData);

let browserMessageManager = this._win.getGroupMessageManager("browsers");
browserMessageManager.broadcastAsyncMessage(
"LightweightTheme:Update", contentThemeData
);
Services.ppmm.sharedData.set(`theme/${this._winId}`, contentThemeData);
}
};

@@ -978,10 +978,13 @@ nsXREDirProvider::DoStartup()
static const char16_t kStartup[] = {'s','t','a','r','t','u','p','\0'};
obsSvc->NotifyObservers(nullptr, "profile-do-change", kStartup);

// Initialize the Enterprise Policies service
nsCOMPtr<nsIObserver> policies(do_GetService("@mozilla.org/browser/enterprisepolicies;1"));
if (policies) {
policies->Observe(nullptr, "policies-startup", nullptr);
// Initialize the Enterprise Policies service in the parent process
// In the content process it's loaded on demand when needed
if (XRE_IsParentProcess()) {
nsCOMPtr<nsIObserver> policies(do_GetService("@mozilla.org/browser/enterprisepolicies;1"));
if (policies) {
policies->Observe(nullptr, "policies-startup", nullptr);
}
}

// Init the Extension Manager

@@ -3298,11 +3298,6 @@ profiler_register_thread(const char* aName, void* aGuessStackTop)
MOZ_ASSERT_IF(NS_IsMainThread(), Scheduler::IsCooperativeThread());
MOZ_RELEASE_ASSERT(CorePS::Exists());

// Make sure we have a nsThread wrapper for the current thread, and that NSPR
// knows its name.
(void) NS_GetCurrentThread();
NS_SetCurrentThreadName(aName);

PSAutoLock lock(gPSMutex);

void* stackTop = GetStackTop(aGuessStackTop);

@@ -405,13 +405,6 @@ nsThread::ThreadList()
return sList;
}

/* static */ void
nsThread::ClearThreadList()
{
OffTheBooksMutexAutoLock mal(ThreadListMutex());
while (ThreadList().popFirst()) {}
}

/* static */ nsThreadEnumerator
nsThread::Enumerate()
{

@@ -427,6 +420,7 @@ nsThread::ThreadFunc(void* aArg)
nsThread* self = initData->thread; // strong reference

self->mThread = PR_GetCurrentThread();
self->mThreadId = uint32_t(PlatformThread::CurrentId());
self->mVirtualThread = GetCurrentVirtualThread();
self->mEventTarget->SetCurrentThread();
SetupCurrentThreadForChaosMode();

@@ -435,7 +429,71 @@ nsThread::ThreadFunc(void* aArg)
NS_SetCurrentThreadName(initData->name.BeginReading());
}

self->InitCommon();
{
#if defined(XP_LINUX)
pthread_attr_t attr;
pthread_attr_init(&attr);
pthread_getattr_np(pthread_self(), &attr);

size_t stackSize;
pthread_attr_getstack(&attr, &self->mStackBase, &stackSize);

// Glibc prior to 2.27 reports the stack size and base including the guard
// region, so we need to compensate for it to get accurate accounting.
// Also, this behavior difference isn't guarded by a versioned symbol, so we
// actually need to check the runtime glibc version, not the version we were
// compiled against.
static bool sAdjustForGuardSize = ({
#ifdef __GLIBC__
unsigned major, minor;
sscanf(gnu_get_libc_version(), "%u.%u", &major, &minor) < 2 ||
major < 2 || (major == 2 && minor < 27);
#else
false;
#endif
});
if (sAdjustForGuardSize) {
size_t guardSize;
pthread_attr_getguardsize(&attr, &guardSize);

// Note: This assumes that the stack grows down, as is the case on all of
// our tier 1 platforms. On platforms where the stack grows up, the
// mStackBase adjustment is unnecessary, but doesn't cause any harm other
// than under-counting stack memory usage by one page.
self->mStackBase = reinterpret_cast<char*>(self->mStackBase) + guardSize;
stackSize -= guardSize;
}

self->mStackSize = stackSize;

// This is a bit of a hack.
//
// We really do want the NOHUGEPAGE flag on our thread stacks, since we
// don't expect any of them to need anywhere near 2MB of space. But setting
// it here is too late to have an effect, since the first stack page has
// already been faulted in existence, and NSPR doesn't give us a way to set
// it beforehand.
//
// What this does get us, however, is a different set of VM flags on our
// thread stacks compared to normal heap memory. Which makes the Linux
// kernel report them as separate regions, even when they are adjacent to
// heap memory. This allows us to accurately track the actual memory
// consumption of our allocated stacks.
madvise(self->mStackBase, stackSize, MADV_NOHUGEPAGE);

pthread_attr_destroy(&attr);
#elif defined(XP_WIN)
static const DynamicallyLinkedFunctionPtr<GetCurrentThreadStackLimitsFn>
sGetStackLimits(L"kernel32.dll", "GetCurrentThreadStackLimits");

if (sGetStackLimits) {
ULONG_PTR stackBottom, stackTop;
sGetStackLimits(&stackBottom, &stackTop);
self->mStackBase = reinterpret_cast<void*>(stackBottom);
self->mStackSize = stackTop - stackBottom;
}
#endif
}

// Inform the ThreadManager
nsThreadManager::get().RegisterCurrentThread(*self);

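As a standalone illustration of the stack-accounting technique used in the hunk above, here is a minimal Linux-only sketch. The NeedsGuardSizeAdjustment and GetStackExtents helpers are invented for this sketch and are not part of the patch; they only restate the pthread and glibc calls shown above.

#ifndef _GNU_SOURCE
#define _GNU_SOURCE  // needed for pthread_getattr_np with some compilers
#endif
#include <pthread.h>
#include <cstdio>
#include <cstddef>
#ifdef __GLIBC__
#include <gnu/libc-version.h>
#endif

// Returns true when the runtime glibc is older than 2.27, i.e. when
// pthread_attr_getstack() still reports extents that include the guard page.
static bool NeedsGuardSizeAdjustment() {
#ifdef __GLIBC__
  unsigned major = 0, minor = 0;
  if (sscanf(gnu_get_libc_version(), "%u.%u", &major, &minor) < 2) {
    return true;  // Unparseable version string: assume the old behaviour.
  }
  return major < 2 || (major == 2 && minor < 27);
#else
  return false;
#endif
}

// Queries the calling thread's usable stack base and size.
static void GetStackExtents(void** aBase, size_t* aSize) {
  pthread_attr_t attr;
  pthread_attr_init(&attr);
  pthread_getattr_np(pthread_self(), &attr);

  pthread_attr_getstack(&attr, aBase, aSize);

  if (NeedsGuardSizeAdjustment()) {
    size_t guardSize = 0;
    pthread_attr_getguardsize(&attr, &guardSize);
    // Stacks grow down here, so the guard region sits at the reported base.
    *aBase = static_cast<char*>(*aBase) + guardSize;
    *aSize -= guardSize;
  }

  pthread_attr_destroy(&attr);
}
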
@@ -516,81 +574,6 @@ nsThread::ThreadFunc(void* aArg)
NS_RELEASE(self);
}

void
nsThread::InitCommon()
{
mThreadId = uint32_t(PlatformThread::CurrentId());

{
#if defined(XP_LINUX)
pthread_attr_t attr;
pthread_attr_init(&attr);
pthread_getattr_np(pthread_self(), &attr);

size_t stackSize;
pthread_attr_getstack(&attr, &mStackBase, &stackSize);

// Glibc prior to 2.27 reports the stack size and base including the guard
// region, so we need to compensate for it to get accurate accounting.
// Also, this behavior difference isn't guarded by a versioned symbol, so we
// actually need to check the runtime glibc version, not the version we were
// compiled against.
static bool sAdjustForGuardSize = ({
#ifdef __GLIBC__
unsigned major, minor;
sscanf(gnu_get_libc_version(), "%u.%u", &major, &minor) < 2 ||
major < 2 || (major == 2 && minor < 27);
#else
false;
#endif
});
if (sAdjustForGuardSize) {
size_t guardSize;
pthread_attr_getguardsize(&attr, &guardSize);

// Note: This assumes that the stack grows down, as is the case on all of
// our tier 1 platforms. On platforms where the stack grows up, the
// mStackBase adjustment is unnecessary, but doesn't cause any harm other
// than under-counting stack memory usage by one page.
mStackBase = reinterpret_cast<char*>(mStackBase) + guardSize;
stackSize -= guardSize;
}

mStackSize = stackSize;

// This is a bit of a hack.
//
// We really do want the NOHUGEPAGE flag on our thread stacks, since we
// don't expect any of them to need anywhere near 2MB of space. But setting
// it here is too late to have an effect, since the first stack page has
// already been faulted in existence, and NSPR doesn't give us a way to set
// it beforehand.
//
// What this does get us, however, is a different set of VM flags on our
// thread stacks compared to normal heap memory. Which makes the Linux
// kernel report them as separate regions, even when they are adjacent to
// heap memory. This allows us to accurately track the actual memory
// consumption of our allocated stacks.
madvise(mStackBase, stackSize, MADV_NOHUGEPAGE);

pthread_attr_destroy(&attr);
#elif defined(XP_WIN)
static const DynamicallyLinkedFunctionPtr<GetCurrentThreadStackLimitsFn>
sGetStackLimits(L"kernel32.dll", "GetCurrentThreadStackLimits");

if (sGetStackLimits) {
ULONG_PTR stackBottom, stackTop;
sGetStackLimits(&stackBottom, &stackTop);
mStackBase = reinterpret_cast<void*>(stackBottom);
mStackSize = stackTop - stackBottom;
}
#endif
}

OffTheBooksMutexAutoLock mal(ThreadListMutex());
ThreadList().insertBack(this);
}

//-----------------------------------------------------------------------------

// Tell the crash reporter to save a memory report if our heuristics determine

@@ -687,17 +670,7 @@ nsThread::~nsThread()
{
NS_ASSERTION(mRequestedShutdownContexts.IsEmpty(),
"shouldn't be waiting on other threads to shutdown");

// We shouldn't need to lock before checking isInList at this point. We're
// destroying the last reference to this object, so there's no way for anyone
// else to remove it in the middle of our check. And the not-in-list state is
// determined the element's next and previous members pointing to itself, so a
// non-atomic update to an adjacent member won't affect the outcome either.
if (isInList()) {
OffTheBooksMutexAutoLock mal(ThreadListMutex());
removeFrom(ThreadList());
}

MOZ_ASSERT(!isInList());
#ifdef DEBUG
// We deliberately leak these so they can be tracked by the leak checker.
// If you're having nsThreadShutdownContext leaks, you can set:

@@ -731,6 +704,11 @@ nsThread::Init(const nsACString& aName)
return NS_ERROR_OUT_OF_MEMORY;
}

{
OffTheBooksMutexAutoLock mal(ThreadListMutex());
ThreadList().insertBack(this);
}

// ThreadFunc will wait for this event to be run before it tries to access
// mThread. By delaying insertion of this event into the queue, we ensure
// that mThread is set properly.

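The Init() hunk above adds the new thread to a process-wide intrusive list while holding ThreadListMutex(). A minimal sketch of that register-on-create / remove-on-destroy pattern using MFBT's mozilla::LinkedList follows; the Tracked class and the std::mutex are stand-ins for this sketch only and are not the nsThread code.

#include <mutex>
#include "mozilla/LinkedList.h"

// A tracked object registers itself on construction and removes itself on
// destruction, mirroring how nsThread maintains the global thread list.
class Tracked : public mozilla::LinkedListElement<Tracked> {
public:
  Tracked() {
    std::lock_guard<std::mutex> lock(sMutex);
    sList.insertBack(this);
  }
  ~Tracked() {
    std::lock_guard<std::mutex> lock(sMutex);
    if (isInList()) {
      remove();
    }
  }
private:
  static std::mutex sMutex;
  static mozilla::LinkedList<Tracked> sList;
};

std::mutex Tracked::sMutex;
mozilla::LinkedList<Tracked> Tracked::sList;
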
@@ -750,7 +728,6 @@ nsThread::InitCurrentThread()
mThread = PR_GetCurrentThread();
mVirtualThread = GetCurrentVirtualThread();
SetupCurrentThreadForChaosMode();
InitCommon();

nsThreadManager::get().RegisterCurrentThread(*this);
return NS_OK;

@@ -879,13 +856,6 @@ nsThread::ShutdownComplete(NotNull<nsThreadShutdownContext*> aContext)
MOZ_ASSERT(mThread);
MOZ_ASSERT(aContext->mTerminatingThread == this);

{
OffTheBooksMutexAutoLock mal(ThreadListMutex());
if (isInList()) {
removeFrom(ThreadList());
}
}

if (aContext->mAwaitingShutdownAck) {
// We're in a synchronous shutdown, so tell whatever is up the stack that
// we're done and unwind the stack so it can call us again.

@@ -66,12 +66,6 @@ public:
// Initialize this as a wrapper for the current PRThread.
nsresult InitCurrentThread();

private:
// Initializes the mThreadId and stack base/size members, and adds the thread
// to the ThreadList().
void InitCommon();

public:
// The PRThread corresponding to this thread.
PRThread* GetPRThread()
{

@@ -176,11 +170,8 @@ protected:

struct nsThreadShutdownContext* ShutdownInternal(bool aSync);

friend class nsThreadManager;

static mozilla::OffTheBooksMutex& ThreadListMutex();
static mozilla::LinkedList<nsThread>& ThreadList();
static void ClearThreadList();

RefPtr<mozilla::SynchronizedEventQueue> mEvents;
RefPtr<mozilla::ThreadEventTarget> mEventTarget;

@@ -214,8 +205,6 @@ protected:
// Set to true if this thread creates a JSRuntime.
bool mCanInvokeJS;

bool mHasTLSEntry = false;

// Used to track which event is being executed by ProcessNextEvent
nsCOMPtr<nsIRunnable> mCurrentEvent;

@@ -94,27 +94,12 @@ AssertIsOnMainThread()

typedef nsTArray<NotNull<RefPtr<nsThread>>> nsThreadArray;

static bool sShutdownComplete;

//-----------------------------------------------------------------------------

/* static */ void
nsThreadManager::ReleaseThread(void* aData)
static void
ReleaseObject(void* aData)
{
if (sShutdownComplete) {
// We've already completed shutdown and released the references to all or
// our TLS wrappers. Don't try to release them again.
return;
}

auto* thread = static_cast<nsThread*>(aData);

get().UnregisterCurrentThread(*thread, true);

if (thread->mHasTLSEntry) {
thread->mHasTLSEntry = false;
thread->Release();
}
static_cast<nsISupports*>(aData)->Release();
}

// statically allocated instance

@@ -251,7 +236,7 @@ nsThreadManager::Init()

Scheduler::EventLoopActivation::Init();

if (PR_NewThreadPrivateIndex(&mCurThreadIndex, ReleaseThread) == PR_FAILURE) {
if (PR_NewThreadPrivateIndex(&mCurThreadIndex, ReleaseObject) == PR_FAILURE) {
return NS_ERROR_FAILURE;
}

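In the Init() hunk above, PR_NewThreadPrivateIndex registers ReleaseObject as the destructor for the current-thread TLS slot, so the reference stored with PR_SetThreadPrivate is dropped automatically when a thread exits. A minimal sketch of that NSPR pattern, assuming NSPR's prthread.h is available; the PerThreadState type and helper names are invented for this sketch.

#include "prthread.h"

struct PerThreadState {
  int someCounter = 0;
};

static PRUintn sTLSIndex;

// NSPR invokes this for each thread's non-null private value when the thread
// exits; this is where the per-thread object gets released.
static void ReleaseState(void* aData) {
  delete static_cast<PerThreadState*>(aData);
}

static bool InitTLS() {
  return PR_NewThreadPrivateIndex(&sTLSIndex, ReleaseState) == PR_SUCCESS;
}

static PerThreadState* GetOrCreateState() {
  auto* state = static_cast<PerThreadState*>(PR_GetThreadPrivate(sTLSIndex));
  if (!state) {
    state = new PerThreadState();
    PR_SetThreadPrivate(sTLSIndex, state);  // ownership passes to ReleaseState
  }
  return state;
}
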
@@ -319,34 +304,32 @@ nsThreadManager::Shutdown()
// Empty the main thread event queue before we begin shutting down threads.
NS_ProcessPendingEvents(mMainThread);

// We gather the threads from the hashtable into a list, so that we avoid
// holding the hashtable lock while calling nsIThread::Shutdown.
nsThreadArray threads;
{
// We gather the threads from the hashtable into a list, so that we avoid
// holding the hashtable lock while calling nsIThread::Shutdown.
nsThreadArray threads;
{
OffTheBooksMutexAutoLock lock(mLock);
for (auto iter = mThreadsByPRThread.Iter(); !iter.Done(); iter.Next()) {
RefPtr<nsThread>& thread = iter.Data();
threads.AppendElement(WrapNotNull(thread));
iter.Remove();
}
OffTheBooksMutexAutoLock lock(mLock);
for (auto iter = mThreadsByPRThread.Iter(); !iter.Done(); iter.Next()) {
RefPtr<nsThread>& thread = iter.Data();
threads.AppendElement(WrapNotNull(thread));
iter.Remove();
}
}

// It's tempting to walk the list of threads here and tell them each to stop
// accepting new events, but that could lead to badness if one of those
// threads is stuck waiting for a response from another thread. To do it
// right, we'd need some way to interrupt the threads.
//
// Instead, we process events on the current thread while waiting for threads
// to shutdown. This means that we have to preserve a mostly functioning
// world until such time as the threads exit.
// It's tempting to walk the list of threads here and tell them each to stop
// accepting new events, but that could lead to badness if one of those
// threads is stuck waiting for a response from another thread. To do it
// right, we'd need some way to interrupt the threads.
//
// Instead, we process events on the current thread while waiting for threads
// to shutdown. This means that we have to preserve a mostly functioning
// world until such time as the threads exit.

// Shutdown all threads that require it (join with threads that we created).
for (uint32_t i = 0; i < threads.Length(); ++i) {
NotNull<nsThread*> thread = threads[i];
if (thread->ShutdownRequired()) {
thread->Shutdown();
}
// Shutdown all threads that require it (join with threads that we created).
for (uint32_t i = 0; i < threads.Length(); ++i) {
NotNull<nsThread*> thread = threads[i];
if (thread->ShutdownRequired()) {
thread->Shutdown();
}
}

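The Shutdown() hunk above keeps the rule stated in its comments: snapshot the thread table while the lock is held, then make the potentially blocking Shutdown() calls with the lock released. A generic standalone sketch of that snapshot-then-operate pattern; Registry and Worker are invented names for the sketch, not the Gecko types.

#include <mutex>
#include <unordered_map>
#include <vector>
#include <memory>
#include <string>

class Worker {
public:
  void Shutdown() { /* may block or re-enter the registry */ }
};

class Registry {
public:
  void ShutdownAll() {
    // Copy the workers out while holding the lock...
    std::vector<std::shared_ptr<Worker>> snapshot;
    {
      std::lock_guard<std::mutex> lock(mLock);
      for (auto& entry : mWorkers) {
        snapshot.push_back(entry.second);
      }
      mWorkers.clear();
    }
    // ...then call the blocking Shutdown() without the lock held, so a worker
    // that needs the registry during its shutdown cannot deadlock against us.
    for (auto& worker : snapshot) {
      worker->Shutdown();
    }
  }
private:
  std::mutex mLock;
  std::unordered_map<std::string, std::shared_ptr<Worker>> mWorkers;
};
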
@@ -377,24 +360,6 @@ nsThreadManager::Shutdown()

// Remove the TLS entry for the main thread.
PR_SetThreadPrivate(mCurThreadIndex, nullptr);

{
// Cleanup the last references to any threads which haven't shut down yet.
nsTArray<RefPtr<nsThread>> threads;
for (auto* thread : nsThread::Enumerate()) {
if (thread->mHasTLSEntry) {
threads.AppendElement(dont_AddRef(thread));
thread->mHasTLSEntry = false;
}
}
}

// xpcshell tests sometimes leak the main thread. They don't enable leak
// checking, so that doesn't cause the test to fail, but leaving the entry in
// the thread list triggers an assertion, which does.
nsThread::ClearThreadList();

sShutdownComplete = true;
}

void

@@ -412,28 +377,21 @@ nsThreadManager::RegisterCurrentThread(nsThread& aThread)
mThreadsByPRThread.Put(aThread.GetPRThread(), &aThread); // XXX check OOM?

aThread.AddRef(); // for TLS entry
aThread.mHasTLSEntry = true;
PR_SetThreadPrivate(mCurThreadIndex, &aThread);
}

void
nsThreadManager::UnregisterCurrentThread(nsThread& aThread, bool aIfExists)
nsThreadManager::UnregisterCurrentThread(nsThread& aThread)
{
MOZ_ASSERT(aThread.GetPRThread() == PR_GetCurrentThread(), "bad aThread");

{
OffTheBooksMutexAutoLock lock(mLock);
OffTheBooksMutexAutoLock lock(mLock);

if (aIfExists && !mThreadsByPRThread.GetWeak(aThread.GetPRThread())) {
return;
}

--mCurrentNumberOfThreads;
mThreadsByPRThread.Remove(aThread.GetPRThread());
}
--mCurrentNumberOfThreads;
mThreadsByPRThread.Remove(aThread.GetPRThread());

PR_SetThreadPrivate(mCurThreadIndex, nullptr);
// Ref-count balanced via ReleaseThread
// Ref-count balanced via ReleaseObject
}

nsThread*

@@ -483,13 +441,7 @@ nsThreadManager::GetCurrentThread()
bool
nsThreadManager::IsNSThread() const
{
if (!mInitialized) {
return false;
}
if (auto* thread = (nsThread*)PR_GetThreadPrivate(mCurThreadIndex)) {
return thread->mShutdownRequired;
}
return false;
return mInitialized && !!PR_GetThreadPrivate(mCurThreadIndex);
}

NS_IMETHODIMP

@@ -36,7 +36,7 @@ public:

// Called by nsThread to inform the ThreadManager it is going away. This
// method must be called when the given thread is the current thread.
void UnregisterCurrentThread(nsThread& aThread, bool aIfExists = false);
void UnregisterCurrentThread(nsThread& aThread);

// Returns the current thread. Returns null if OOM or if ThreadManager isn't
// initialized. Creates the nsThread if one does not exist yet.

@@ -85,8 +85,6 @@ private:
SpinEventLoopUntilInternal(nsINestedEventLoopCondition* aCondition,
bool aCheckingShutdown);

static void ReleaseThread(void* aData);

nsRefPtrHashtable<nsPtrHashKey<PRThread>, nsThread> mThreadsByPRThread;
unsigned mCurThreadIndex; // thread-local-storage index
RefPtr<nsThread> mMainThread;