зеркало из https://github.com/mozilla/gecko-dev.git
Merge m-c to inbound, a=merge
MozReview-Commit-ID: FOmIgsEP2fb
This commit is contained in:
Коммит
57987f930c
|
@ -669,7 +669,7 @@
|
|||
if (!this._shouldShowProgress(aRequest))
|
||||
return;
|
||||
|
||||
if (this.mTotalProgress && aWebProgress && aWebProgress.isTopLevel)
|
||||
if (this.mTotalProgress && this.mTab.hasAttribute("busy"))
|
||||
this.mTab.setAttribute("progress", "true");
|
||||
|
||||
this._callProgressListeners("onProgressChange",
|
||||
|
|
|
@ -213,6 +213,8 @@ var tests = [
|
|||
this.notification = showNotification(this.notifyObj);
|
||||
},
|
||||
onShown(popup) {
|
||||
let notification = popup.childNodes[0];
|
||||
is(notification.getAttribute("buttonhighlight"), "true", "default action is highlighted");
|
||||
triggerMainCommand(popup);
|
||||
},
|
||||
onHidden(popup) {
|
||||
|
@ -233,6 +235,7 @@ var tests = [
|
|||
onShown(popup) {
|
||||
let notification = popup.childNodes[0];
|
||||
is(notification.getAttribute("secondarybuttonhidden"), "true", "secondary button is hidden");
|
||||
is(notification.getAttribute("buttonhighlight"), "true", "default action is highlighted");
|
||||
triggerMainCommand(popup);
|
||||
},
|
||||
onHidden(popup) {
|
||||
|
|
|
@ -631,15 +631,18 @@ CustomizeMode.prototype = {
|
|||
aNode = aNode.firstChild;
|
||||
}
|
||||
|
||||
// If the user explicitly moves this item, turn off autohide.
|
||||
if (aNode.id == "downloads-button") {
|
||||
Services.prefs.setBoolPref(kDownloadAutoHidePref, false);
|
||||
}
|
||||
|
||||
CustomizableUI.addWidgetToArea(aNode.id, CustomizableUI.AREA_NAVBAR);
|
||||
if (!this._customizing) {
|
||||
CustomizableUI.dispatchToolboxEvent("customizationchange");
|
||||
}
|
||||
|
||||
// If the user explicitly moves this item, turn off autohide.
|
||||
if (aNode.id == "downloads-button") {
|
||||
Services.prefs.setBoolPref(kDownloadAutoHidePref, false);
|
||||
if (this._customizing) {
|
||||
this._showDownloadsAutoHidePanel();
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
addToPanel(aNode) {
|
||||
|
@ -648,17 +651,20 @@ CustomizeMode.prototype = {
|
|||
aNode = aNode.firstChild;
|
||||
}
|
||||
|
||||
// If the user explicitly moves this item, turn off autohide.
|
||||
if (aNode.id == "downloads-button") {
|
||||
Services.prefs.setBoolPref(kDownloadAutoHidePref, false);
|
||||
}
|
||||
|
||||
let panel = CustomizableUI.AREA_FIXED_OVERFLOW_PANEL;
|
||||
CustomizableUI.addWidgetToArea(aNode.id, panel);
|
||||
if (!this._customizing) {
|
||||
CustomizableUI.dispatchToolboxEvent("customizationchange");
|
||||
}
|
||||
|
||||
// If the user explicitly moves this item, turn off autohide.
|
||||
if (aNode.id == "downloads-button") {
|
||||
Services.prefs.setBoolPref(kDownloadAutoHidePref, false);
|
||||
if (this._customizing) {
|
||||
this._showDownloadsAutoHidePanel();
|
||||
}
|
||||
}
|
||||
|
||||
if (Services.prefs.getBoolPref("toolkit.cosmeticAnimations.enabled")) {
|
||||
let overflowButton = this.document.getElementById("nav-bar-overflow-button");
|
||||
BrowserUtils.setToolbarButtonHeightProperty(overflowButton).then(() => {
|
||||
|
@ -681,14 +687,18 @@ CustomizeMode.prototype = {
|
|||
if (aNode.localName == "toolbarpaletteitem" && aNode.firstChild) {
|
||||
aNode = aNode.firstChild;
|
||||
}
|
||||
// If the user explicitly removes this item, turn off autohide.
|
||||
if (aNode.id == "downloads-button") {
|
||||
Services.prefs.setBoolPref(kDownloadAutoHidePref, false);
|
||||
}
|
||||
CustomizableUI.removeWidgetFromArea(aNode.id);
|
||||
if (!this._customizing) {
|
||||
CustomizableUI.dispatchToolboxEvent("customizationchange");
|
||||
}
|
||||
|
||||
// If the user explicitly removes this item, turn off autohide.
|
||||
if (aNode.id == "downloads-button") {
|
||||
Services.prefs.setBoolPref(kDownloadAutoHidePref, false);
|
||||
if (this._customizing) {
|
||||
this._showDownloadsAutoHidePanel();
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
populatePalette() {
|
||||
|
@ -1205,9 +1215,6 @@ CustomizeMode.prototype = {
|
|||
this.visiblePalette.appendChild(paletteItem);
|
||||
}
|
||||
}
|
||||
if (aNodeToChange.id == "downloads-button") {
|
||||
this._showDownloadsAutoHidePanel();
|
||||
}
|
||||
},
|
||||
|
||||
onWidgetDestroyed(aWidgetId) {
|
||||
|
@ -1832,6 +1839,7 @@ CustomizeMode.prototype = {
|
|||
// If the user explicitly moves this item, turn off autohide.
|
||||
if (draggedItemId == "downloads-button") {
|
||||
Services.prefs.setBoolPref(kDownloadAutoHidePref, false);
|
||||
this._showDownloadsAutoHidePanel();
|
||||
}
|
||||
},
|
||||
|
||||
|
@ -2360,7 +2368,7 @@ CustomizeMode.prototype = {
|
|||
}
|
||||
},
|
||||
|
||||
_showDownloadsAutoHidePanel() {
|
||||
async _showDownloadsAutoHidePanel() {
|
||||
let doc = this.document;
|
||||
let panel = doc.getElementById(kDownloadAutohidePanelId);
|
||||
panel.hidePopup();
|
||||
|
@ -2370,6 +2378,38 @@ CustomizeMode.prototype = {
|
|||
return;
|
||||
}
|
||||
|
||||
let offsetX = 0, offsetY = 0;
|
||||
let panelOnTheLeft = false;
|
||||
let toolbarContainer = button.closest("toolbar");
|
||||
if (toolbarContainer && toolbarContainer.id == "nav-bar") {
|
||||
let navbarWidgets = CustomizableUI.getWidgetIdsInArea("nav-bar");
|
||||
if (navbarWidgets.indexOf("urlbar-container") <= navbarWidgets.indexOf("downloads-button")) {
|
||||
panelOnTheLeft = true;
|
||||
}
|
||||
} else {
|
||||
await BrowserUtils.promiseLayoutFlushed(doc, "display", () => {});
|
||||
if (!this._customizing || !this._wantToBeInCustomizeMode) {
|
||||
return;
|
||||
}
|
||||
let buttonBounds = this._dwu.getBoundsWithoutFlushing(button);
|
||||
let windowBounds = this._dwu.getBoundsWithoutFlushing(doc.documentElement);
|
||||
panelOnTheLeft = (buttonBounds.left + buttonBounds.width / 2) > windowBounds.width / 2;
|
||||
}
|
||||
let position;
|
||||
if (panelOnTheLeft) {
|
||||
// Tested in RTL, these get inverted automatically, so this does the
|
||||
// right thing without taking RTL into account explicitly.
|
||||
position = "leftcenter topright";
|
||||
if (toolbarContainer) {
|
||||
offsetX = 8;
|
||||
}
|
||||
} else {
|
||||
position = "rightcenter topleft";
|
||||
if (toolbarContainer) {
|
||||
offsetX = -8;
|
||||
}
|
||||
}
|
||||
|
||||
let checkbox = doc.getElementById(kDownloadAutohideCheckboxId);
|
||||
if (this.window.DownloadsButton.autoHideDownloadsButton) {
|
||||
checkbox.setAttribute("checked", "true");
|
||||
|
@ -2377,28 +2417,6 @@ CustomizeMode.prototype = {
|
|||
checkbox.removeAttribute("checked");
|
||||
}
|
||||
|
||||
let offsetX = 0, offsetY = 0;
|
||||
let position;
|
||||
if (button.closest("#nav-bar")) {
|
||||
let navbarWidgets = CustomizableUI.getWidgetIdsInArea("nav-bar");
|
||||
if (navbarWidgets.indexOf("urlbar-container") > navbarWidgets.indexOf("downloads-button")) {
|
||||
// Tested in RTL, these get inverted automatically, so this does the
|
||||
// right thing without taking RTL into account explicitly.
|
||||
position = "rightcenter topleft";
|
||||
offsetX = -8;
|
||||
} else {
|
||||
position = "leftcenter topright";
|
||||
offsetX = 8;
|
||||
}
|
||||
} else if (button.closest("#customization-palette")) {
|
||||
position = "topcenter bottomleft";
|
||||
offsetY = 10;
|
||||
} else {
|
||||
// For non-navbar toolbars, this works better than guessing whether
|
||||
// left or right is a better place to position:
|
||||
position = "bottomcenter topleft";
|
||||
offsetY = -5;
|
||||
}
|
||||
// We don't use the icon to anchor because it might be resizing because of
|
||||
// the animations for drag/drop. Hence the use of offsets.
|
||||
panel.openPopup(button, position, offsetX, offsetY);
|
||||
|
|
|
@ -133,13 +133,16 @@ var tests = [
|
|||
isnot(PanelUI.panel.state, "closed", "Panel should have opened");
|
||||
isnot(highlight.classList.contains("rounded-highlight"), true, "Highlight should not be round-rectangle styled.");
|
||||
|
||||
let hiddenPromise = promisePanelElementHidden(window, PanelUI.panel);
|
||||
// Move the highlight outside which should close the app menu.
|
||||
gContentAPI.showHighlight("appMenu");
|
||||
hiddenPromise.then(() => {
|
||||
waitForElementToBeVisible(highlight, function checkPanelIsClosed() {
|
||||
isnot(PanelUI.panel.state, "open",
|
||||
"Panel should have closed after the highlight moved elsewhere.");
|
||||
done();
|
||||
}, "Highlight should move to the appMenu button");
|
||||
});
|
||||
}, "Highlight should be shown after showHighlight() for fixed panel items");
|
||||
},
|
||||
function test_highlight_customize_manual_open_close(done) {
|
||||
|
|
|
@ -34,6 +34,7 @@ skip-if = os != "win"
|
|||
run-if = crashreporter
|
||||
[browser_urlBar_zoom.js]
|
||||
[browser_UsageTelemetry.js]
|
||||
[browser_UsageTelemetry_domains.js]
|
||||
[browser_UsageTelemetry_private_and_restore.js]
|
||||
[browser_UsageTelemetry_urlbar.js]
|
||||
skip-if = (os == 'linux' && bits == 32 && debug) # bug 1356758
|
||||
|
|
|
@ -16,53 +16,6 @@ XPCOMUtils.defineLazyModuleGetter(this, "MINIMUM_TAB_COUNT_INTERVAL_MS",
|
|||
// Reset internal URI counter in case URIs were opened by other tests.
|
||||
Services.obs.notifyObservers(null, TELEMETRY_SUBSESSION_TOPIC);
|
||||
|
||||
/**
|
||||
* Waits for the web progress listener associated with this tab to fire an
|
||||
* onLocationChange for a non-error page.
|
||||
*
|
||||
* @param {xul:browser} browser
|
||||
* A xul:browser.
|
||||
*
|
||||
* @return {Promise}
|
||||
* @resolves When navigating to a non-error page.
|
||||
*/
|
||||
function browserLocationChanged(browser) {
|
||||
return new Promise(resolve => {
|
||||
let wpl = {
|
||||
onStateChange() {},
|
||||
onSecurityChange() {},
|
||||
onStatusChange() {},
|
||||
onLocationChange(aWebProgress, aRequest, aURI, aFlags) {
|
||||
if (!(aFlags & Ci.nsIWebProgressListener.LOCATION_CHANGE_ERROR_PAGE)) {
|
||||
browser.webProgress.removeProgressListener(filter);
|
||||
filter.removeProgressListener(wpl);
|
||||
resolve();
|
||||
}
|
||||
},
|
||||
QueryInterface: XPCOMUtils.generateQI([
|
||||
Ci.nsIWebProgressListener,
|
||||
Ci.nsIWebProgressListener2,
|
||||
]),
|
||||
};
|
||||
const filter = Cc["@mozilla.org/appshell/component/browser-status-filter;1"]
|
||||
.createInstance(Ci.nsIWebProgress);
|
||||
filter.addProgressListener(wpl, Ci.nsIWebProgress.NOTIFY_ALL);
|
||||
browser.webProgress.addProgressListener(filter, Ci.nsIWebProgress.NOTIFY_ALL);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* An helper that checks the value of a scalar if it's expected to be > 0,
|
||||
* otherwise makes sure that the scalar it's not reported.
|
||||
*/
|
||||
let checkScalar = (scalars, scalarName, value, msg) => {
|
||||
if (value > 0) {
|
||||
is(scalars[scalarName], value, msg);
|
||||
return;
|
||||
}
|
||||
ok(!(scalarName in scalars), scalarName + " must not be reported.");
|
||||
};
|
||||
|
||||
/**
|
||||
* Get a snapshot of the scalars and check them against the provided values.
|
||||
*/
|
||||
|
@ -183,94 +136,6 @@ add_task(async function test_subsessionSplit() {
|
|||
await BrowserTestUtils.closeWindow(win);
|
||||
});
|
||||
|
||||
add_task(async function test_URIAndDomainCounts() {
|
||||
// Let's reset the counts.
|
||||
Services.telemetry.clearScalars();
|
||||
|
||||
let checkCounts = (countsObject) => {
|
||||
// Get a snapshot of the scalars and then clear them.
|
||||
const scalars = getParentProcessScalars(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN);
|
||||
checkScalar(scalars, TOTAL_URI_COUNT, countsObject.totalURIs,
|
||||
"The URI scalar must contain the expected value.");
|
||||
checkScalar(scalars, UNIQUE_DOMAINS_COUNT, countsObject.domainCount,
|
||||
"The unique domains scalar must contain the expected value.");
|
||||
checkScalar(scalars, UNFILTERED_URI_COUNT, countsObject.totalUnfilteredURIs,
|
||||
"The unfiltered URI scalar must contain the expected value.");
|
||||
};
|
||||
|
||||
// Check that about:blank doesn't get counted in the URI total.
|
||||
let firstTab = await BrowserTestUtils.openNewForegroundTab(gBrowser, "about:blank");
|
||||
checkCounts({totalURIs: 0, domainCount: 0, totalUnfilteredURIs: 0});
|
||||
|
||||
// Open a different page and check the counts.
|
||||
await BrowserTestUtils.loadURI(firstTab.linkedBrowser, "http://example.com/");
|
||||
await BrowserTestUtils.browserLoaded(firstTab.linkedBrowser);
|
||||
checkCounts({totalURIs: 1, domainCount: 1, totalUnfilteredURIs: 1});
|
||||
|
||||
// Activating a different tab must not increase the URI count.
|
||||
let secondTab = await BrowserTestUtils.openNewForegroundTab(gBrowser, "about:blank");
|
||||
await BrowserTestUtils.switchTab(gBrowser, firstTab);
|
||||
checkCounts({totalURIs: 1, domainCount: 1, totalUnfilteredURIs: 1});
|
||||
await BrowserTestUtils.removeTab(secondTab);
|
||||
|
||||
// Open a new window and set the tab to a new address.
|
||||
let newWin = await BrowserTestUtils.openNewBrowserWindow();
|
||||
await BrowserTestUtils.loadURI(newWin.gBrowser.selectedBrowser, "http://example.com/");
|
||||
await BrowserTestUtils.browserLoaded(newWin.gBrowser.selectedBrowser);
|
||||
checkCounts({totalURIs: 2, domainCount: 1, totalUnfilteredURIs: 2});
|
||||
|
||||
// We should not count AJAX requests.
|
||||
const XHR_URL = "http://example.com/r";
|
||||
await ContentTask.spawn(newWin.gBrowser.selectedBrowser, XHR_URL, function(url) {
|
||||
return new Promise(resolve => {
|
||||
var xhr = new content.window.XMLHttpRequest();
|
||||
xhr.open("GET", url);
|
||||
xhr.onload = () => resolve();
|
||||
xhr.send();
|
||||
});
|
||||
});
|
||||
checkCounts({totalURIs: 2, domainCount: 1, totalUnfilteredURIs: 2});
|
||||
|
||||
// Check that we're counting page fragments.
|
||||
let loadingStopped = browserLocationChanged(newWin.gBrowser.selectedBrowser);
|
||||
await BrowserTestUtils.loadURI(newWin.gBrowser.selectedBrowser, "http://example.com/#2");
|
||||
await loadingStopped;
|
||||
checkCounts({totalURIs: 3, domainCount: 1, totalUnfilteredURIs: 3});
|
||||
|
||||
// Check that a different URI from the example.com domain doesn't increment the unique count.
|
||||
await BrowserTestUtils.loadURI(newWin.gBrowser.selectedBrowser, "http://test1.example.com/");
|
||||
await BrowserTestUtils.browserLoaded(newWin.gBrowser.selectedBrowser);
|
||||
checkCounts({totalURIs: 4, domainCount: 1, totalUnfilteredURIs: 4});
|
||||
|
||||
// Make sure that the unique domains counter is incrementing for a different domain.
|
||||
await BrowserTestUtils.loadURI(newWin.gBrowser.selectedBrowser, "https://example.org/");
|
||||
await BrowserTestUtils.browserLoaded(newWin.gBrowser.selectedBrowser);
|
||||
checkCounts({totalURIs: 5, domainCount: 2, totalUnfilteredURIs: 5});
|
||||
|
||||
// Check that we only account for top level loads (e.g. we don't count URIs from
|
||||
// embedded iframes).
|
||||
await ContentTask.spawn(newWin.gBrowser.selectedBrowser, null, async function() {
|
||||
let doc = content.document;
|
||||
let iframe = doc.createElement("iframe");
|
||||
let promiseIframeLoaded = ContentTaskUtils.waitForEvent(iframe, "load", false);
|
||||
iframe.src = "https://example.org/test";
|
||||
doc.body.insertBefore(iframe, doc.body.firstChild);
|
||||
await promiseIframeLoaded;
|
||||
});
|
||||
checkCounts({totalURIs: 5, domainCount: 2, totalUnfilteredURIs: 5});
|
||||
|
||||
// Check that uncommon protocols get counted in the unfiltered URI probe.
|
||||
const TEST_PAGE =
|
||||
"data:text/html,<a id='target' href='%23par1'>Click me</a><a name='par1'>The paragraph.</a>";
|
||||
await BrowserTestUtils.loadURI(newWin.gBrowser.selectedBrowser, TEST_PAGE);
|
||||
await BrowserTestUtils.browserLoaded(newWin.gBrowser.selectedBrowser);
|
||||
checkCounts({totalURIs: 5, domainCount: 2, totalUnfilteredURIs: 6});
|
||||
|
||||
// Clean up.
|
||||
await BrowserTestUtils.removeTab(firstTab);
|
||||
await BrowserTestUtils.closeWindow(newWin);
|
||||
});
|
||||
|
||||
function checkTabCountHistogram(result, expected, message) {
|
||||
let expectedPadded = result.counts.map((val, idx) => idx < expected.length ? expected[idx] : 0);
|
||||
Assert.deepEqual(result.counts, expectedPadded, message);
|
||||
|
|
|
@ -0,0 +1,132 @@
|
|||
"use strict";
|
||||
|
||||
const TOTAL_URI_COUNT = "browser.engagement.total_uri_count";
|
||||
const UNIQUE_DOMAINS_COUNT = "browser.engagement.unique_domains_count";
|
||||
const UNFILTERED_URI_COUNT = "browser.engagement.unfiltered_uri_count";
|
||||
const TELEMETRY_SUBSESSION_TOPIC = "internal-telemetry-after-subsession-split";
|
||||
|
||||
// Reset internal URI counter in case URIs were opened by other tests.
|
||||
Services.obs.notifyObservers(null, TELEMETRY_SUBSESSION_TOPIC);
|
||||
|
||||
/**
|
||||
* Waits for the web progress listener associated with this tab to fire an
|
||||
* onLocationChange for a non-error page.
|
||||
*
|
||||
* @param {xul:browser} browser
|
||||
* A xul:browser.
|
||||
*
|
||||
* @return {Promise}
|
||||
* @resolves When navigating to a non-error page.
|
||||
*/
|
||||
function browserLocationChanged(browser) {
|
||||
return new Promise(resolve => {
|
||||
let wpl = {
|
||||
onStateChange() {},
|
||||
onSecurityChange() {},
|
||||
onStatusChange() {},
|
||||
onLocationChange(aWebProgress, aRequest, aURI, aFlags) {
|
||||
if (!(aFlags & Ci.nsIWebProgressListener.LOCATION_CHANGE_ERROR_PAGE)) {
|
||||
browser.webProgress.removeProgressListener(filter);
|
||||
filter.removeProgressListener(wpl);
|
||||
resolve();
|
||||
}
|
||||
},
|
||||
QueryInterface: XPCOMUtils.generateQI([
|
||||
Ci.nsIWebProgressListener,
|
||||
Ci.nsIWebProgressListener2,
|
||||
]),
|
||||
};
|
||||
const filter = Cc["@mozilla.org/appshell/component/browser-status-filter;1"]
|
||||
.createInstance(Ci.nsIWebProgress);
|
||||
filter.addProgressListener(wpl, Ci.nsIWebProgress.NOTIFY_ALL);
|
||||
browser.webProgress.addProgressListener(filter, Ci.nsIWebProgress.NOTIFY_ALL);
|
||||
});
|
||||
}
|
||||
|
||||
add_task(async function test_URIAndDomainCounts() {
|
||||
// Let's reset the counts.
|
||||
Services.telemetry.clearScalars();
|
||||
|
||||
let checkCounts = (countsObject) => {
|
||||
// Get a snapshot of the scalars and then clear them.
|
||||
const scalars = getParentProcessScalars(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN);
|
||||
checkScalar(scalars, TOTAL_URI_COUNT, countsObject.totalURIs,
|
||||
"The URI scalar must contain the expected value.");
|
||||
checkScalar(scalars, UNIQUE_DOMAINS_COUNT, countsObject.domainCount,
|
||||
"The unique domains scalar must contain the expected value.");
|
||||
checkScalar(scalars, UNFILTERED_URI_COUNT, countsObject.totalUnfilteredURIs,
|
||||
"The unfiltered URI scalar must contain the expected value.");
|
||||
};
|
||||
|
||||
// Check that about:blank doesn't get counted in the URI total.
|
||||
let firstTab = await BrowserTestUtils.openNewForegroundTab(gBrowser, "about:blank");
|
||||
checkCounts({totalURIs: 0, domainCount: 0, totalUnfilteredURIs: 0});
|
||||
|
||||
// Open a different page and check the counts.
|
||||
await BrowserTestUtils.loadURI(firstTab.linkedBrowser, "http://example.com/");
|
||||
await BrowserTestUtils.browserLoaded(firstTab.linkedBrowser);
|
||||
checkCounts({totalURIs: 1, domainCount: 1, totalUnfilteredURIs: 1});
|
||||
|
||||
// Activating a different tab must not increase the URI count.
|
||||
let secondTab = await BrowserTestUtils.openNewForegroundTab(gBrowser, "about:blank");
|
||||
await BrowserTestUtils.switchTab(gBrowser, firstTab);
|
||||
checkCounts({totalURIs: 1, domainCount: 1, totalUnfilteredURIs: 1});
|
||||
await BrowserTestUtils.removeTab(secondTab);
|
||||
|
||||
// Open a new window and set the tab to a new address.
|
||||
let newWin = await BrowserTestUtils.openNewBrowserWindow();
|
||||
await BrowserTestUtils.loadURI(newWin.gBrowser.selectedBrowser, "http://example.com/");
|
||||
await BrowserTestUtils.browserLoaded(newWin.gBrowser.selectedBrowser);
|
||||
checkCounts({totalURIs: 2, domainCount: 1, totalUnfilteredURIs: 2});
|
||||
|
||||
// We should not count AJAX requests.
|
||||
const XHR_URL = "http://example.com/r";
|
||||
await ContentTask.spawn(newWin.gBrowser.selectedBrowser, XHR_URL, function(url) {
|
||||
return new Promise(resolve => {
|
||||
var xhr = new content.window.XMLHttpRequest();
|
||||
xhr.open("GET", url);
|
||||
xhr.onload = () => resolve();
|
||||
xhr.send();
|
||||
});
|
||||
});
|
||||
checkCounts({totalURIs: 2, domainCount: 1, totalUnfilteredURIs: 2});
|
||||
|
||||
// Check that we're counting page fragments.
|
||||
let loadingStopped = browserLocationChanged(newWin.gBrowser.selectedBrowser);
|
||||
await BrowserTestUtils.loadURI(newWin.gBrowser.selectedBrowser, "http://example.com/#2");
|
||||
await loadingStopped;
|
||||
checkCounts({totalURIs: 3, domainCount: 1, totalUnfilteredURIs: 3});
|
||||
|
||||
// Check that a different URI from the example.com domain doesn't increment the unique count.
|
||||
await BrowserTestUtils.loadURI(newWin.gBrowser.selectedBrowser, "http://test1.example.com/");
|
||||
await BrowserTestUtils.browserLoaded(newWin.gBrowser.selectedBrowser);
|
||||
checkCounts({totalURIs: 4, domainCount: 1, totalUnfilteredURIs: 4});
|
||||
|
||||
// Make sure that the unique domains counter is incrementing for a different domain.
|
||||
await BrowserTestUtils.loadURI(newWin.gBrowser.selectedBrowser, "https://example.org/");
|
||||
await BrowserTestUtils.browserLoaded(newWin.gBrowser.selectedBrowser);
|
||||
checkCounts({totalURIs: 5, domainCount: 2, totalUnfilteredURIs: 5});
|
||||
|
||||
// Check that we only account for top level loads (e.g. we don't count URIs from
|
||||
// embedded iframes).
|
||||
await ContentTask.spawn(newWin.gBrowser.selectedBrowser, null, async function() {
|
||||
let doc = content.document;
|
||||
let iframe = doc.createElement("iframe");
|
||||
let promiseIframeLoaded = ContentTaskUtils.waitForEvent(iframe, "load", false);
|
||||
iframe.src = "https://example.org/test";
|
||||
doc.body.insertBefore(iframe, doc.body.firstChild);
|
||||
await promiseIframeLoaded;
|
||||
});
|
||||
checkCounts({totalURIs: 5, domainCount: 2, totalUnfilteredURIs: 5});
|
||||
|
||||
// Check that uncommon protocols get counted in the unfiltered URI probe.
|
||||
const TEST_PAGE =
|
||||
"data:text/html,<a id='target' href='%23par1'>Click me</a><a name='par1'>The paragraph.</a>";
|
||||
await BrowserTestUtils.loadURI(newWin.gBrowser.selectedBrowser, TEST_PAGE);
|
||||
await BrowserTestUtils.browserLoaded(newWin.gBrowser.selectedBrowser);
|
||||
checkCounts({totalURIs: 5, domainCount: 2, totalUnfilteredURIs: 6});
|
||||
|
||||
// Clean up.
|
||||
await BrowserTestUtils.removeTab(firstTab);
|
||||
await BrowserTestUtils.closeWindow(newWin);
|
||||
});
|
|
@ -57,6 +57,27 @@ function checkKeyedScalar(scalars, scalarName, key, expectedValue) {
|
|||
scalarName + "['" + key + "'] must contain the expected value");
|
||||
}
|
||||
|
||||
/**
|
||||
* An helper that checks the value of a scalar if it's expected to be > 0,
|
||||
* otherwise makes sure that the scalar it's not reported.
|
||||
*
|
||||
* @param {Object} scalars
|
||||
* The snapshot of the scalars.
|
||||
* @param {String} scalarName
|
||||
* The name of the scalar to check.
|
||||
* @param {Number} value
|
||||
* The expected value for the provided scalar.
|
||||
* @param {String} msg
|
||||
* The message to print when checking the value.
|
||||
*/
|
||||
let checkScalar = (scalars, scalarName, value, msg) => {
|
||||
if (value > 0) {
|
||||
is(scalars[scalarName], value, msg);
|
||||
return;
|
||||
}
|
||||
ok(!(scalarName in scalars), scalarName + " must not be reported.");
|
||||
};
|
||||
|
||||
/**
|
||||
* An utility function to write some text in the search input box
|
||||
* in a content page.
|
||||
|
|
|
@ -232,10 +232,6 @@ panelview {
|
|||
-moz-box-flex: 1;
|
||||
}
|
||||
|
||||
.cui-widget-panelview .subviewbutton.panel-subview-footer .menu-accel-container {
|
||||
-moz-box-pack: end;
|
||||
}
|
||||
|
||||
#appMenu-popup > arrowscrollbox > autorepeatbutton,
|
||||
#PanelUI-popup > arrowscrollbox > autorepeatbutton {
|
||||
display: none;
|
||||
|
@ -1260,6 +1256,7 @@ panelview .toolbarbutton-1,
|
|||
.subviewbutton.panel-subview-footer > .menu-text {
|
||||
-moz-appearance: none;
|
||||
padding-inline-end: 6px;
|
||||
padding-inline-start: 0 !important; /* to override menu.css on Windows */
|
||||
-moz-box-flex: 0;
|
||||
}
|
||||
|
||||
|
@ -1271,6 +1268,11 @@ panelview .toolbarbutton-1,
|
|||
padding-inline-start: 6px;
|
||||
}
|
||||
|
||||
.subviewbutton > .menu-accel-container > .menu-iconic-accel,
|
||||
.subviewbutton > .menu-accel-container > .menu-accel {
|
||||
margin-inline-end: 0 !important; /* to override menu.css on Windows */
|
||||
}
|
||||
|
||||
#widget-overflow-fixed-list .toolbarbutton-1 > .toolbarbutton-text,
|
||||
#widget-overflow-list .toolbarbutton-1 > .toolbarbutton-text,
|
||||
.subviewbutton:not(.panel-subview-footer) > .toolbarbutton-text,
|
||||
|
@ -1513,7 +1515,7 @@ menuitem.panel-subview-footer@menuStateActive@,
|
|||
}
|
||||
|
||||
.subviewbutton > .menu-accel-container {
|
||||
-moz-box-pack: start;
|
||||
-moz-box-pack: end;
|
||||
margin-inline-start: 10px;
|
||||
margin-inline-end: auto;
|
||||
color: GrayText;
|
||||
|
|
|
@ -308,7 +308,7 @@ skip-if = e10s && debug
|
|||
[browser_dbg_stack-02.js]
|
||||
skip-if = e10s && debug
|
||||
[browser_dbg_stack-03.js]
|
||||
skip-if = e10s || (!e10s && os == "win") # TODO, win !e10s: Bug 1391369
|
||||
skip-if = e10s || (!e10s && os == "win") || coverage # TODO, win !e10s: Bug 1391369, coverage: Bug 1400683
|
||||
[browser_dbg_stack-04.js]
|
||||
skip-if = e10s && debug || (!e10s && os == "win") # Bug 1391369
|
||||
[browser_dbg_stack-05.js]
|
||||
|
|
|
@ -95,6 +95,7 @@ let webpackConfig = {
|
|||
"devtools/client/shared/scroll": path.join(__dirname, "../../client/shared/scroll"),
|
||||
"devtools/client/shared/source-utils": path.join(__dirname, "../../client/shared/source-utils"),
|
||||
"devtools/client/shared/theme": path.join(__dirname, "../../client/shared/theme"),
|
||||
|
||||
"devtools/client/shared/vendor/immutable": "immutable",
|
||||
"devtools/client/shared/vendor/react": "react",
|
||||
"devtools/client/shared/vendor/react-dom": "react-dom",
|
||||
|
@ -102,6 +103,8 @@ let webpackConfig = {
|
|||
"devtools/client/shared/vendor/redux": "redux",
|
||||
"devtools/client/shared/vendor/reselect": "reselect",
|
||||
"devtools/client/shared/vendor/jszip": "jszip",
|
||||
"devtools/client/shared/vendor/lodash": path.join(__dirname, "../../client/shared/vendor/lodash"),
|
||||
|
||||
"devtools/client/shared/widgets/tooltip/HTMLTooltip": path.join(__dirname, "../../client/shared/widgets/tooltip/HTMLTooltip"),
|
||||
"devtools/client/shared/widgets/tooltip/ImageTooltipHelper": path.join(__dirname, "../../client/shared/widgets/tooltip/ImageTooltipHelper"),
|
||||
"devtools/client/shared/widgets/tooltip/TooltipToggle": path.join(__dirname, "../../client/shared/widgets/tooltip/TooltipToggle"),
|
||||
|
|
|
@ -34,7 +34,7 @@ InitContext(aom_codec_ctx_t* aCtx,
|
|||
{
|
||||
aom_codec_iface_t* dx = aom_codec_av1_dx();
|
||||
if (!dx) {
|
||||
return MediaResult(NS_ERROR_FAILURE,
|
||||
return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("Couldn't get AV1 decoder interface."));
|
||||
}
|
||||
|
||||
|
@ -57,7 +57,7 @@ InitContext(aom_codec_ctx_t* aCtx,
|
|||
auto res = aom_codec_dec_init(aCtx, dx, &config, flags);
|
||||
if (res != AOM_CODEC_OK) {
|
||||
LOG_RESULT(res, "Codec initialization failed!");
|
||||
return MediaResult(NS_ERROR_FAILURE,
|
||||
return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("AOM error initializing AV1 decoder: %s",
|
||||
aom_codec_err_to_string(res)));
|
||||
}
|
||||
|
@ -92,8 +92,9 @@ AOMDecoder::Shutdown()
|
|||
RefPtr<MediaDataDecoder::InitPromise>
|
||||
AOMDecoder::Init()
|
||||
{
|
||||
if (NS_FAILED(InitContext(&mCodec, mInfo))) {
|
||||
return AOMDecoder::InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
MediaResult rv = InitContext(&mCodec, mInfo);
|
||||
if (NS_FAILED(rv)) {
|
||||
return AOMDecoder::InitPromise::CreateAndReject(rv,
|
||||
__func__);
|
||||
}
|
||||
return AOMDecoder::InitPromise::CreateAndResolve(TrackInfo::kVideoTrack,
|
||||
|
|
|
@ -69,14 +69,20 @@ OpusDataDecoder::Init()
|
|||
uint8_t *p = mInfo.mCodecSpecificConfig->Elements();
|
||||
if (length < sizeof(uint64_t)) {
|
||||
OPUS_DEBUG("CodecSpecificConfig too short to read codecDelay!");
|
||||
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
|
||||
return InitPromise::CreateAndReject(
|
||||
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("CodecSpecificConfig too short to read codecDelay!")),
|
||||
__func__);
|
||||
}
|
||||
int64_t codecDelay = BigEndian::readUint64(p);
|
||||
length -= sizeof(uint64_t);
|
||||
p += sizeof(uint64_t);
|
||||
if (NS_FAILED(DecodeHeader(p, length))) {
|
||||
OPUS_DEBUG("Error decoding header!");
|
||||
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
|
||||
return InitPromise::CreateAndReject(
|
||||
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("Error decoding header!")),
|
||||
__func__);
|
||||
}
|
||||
|
||||
int r;
|
||||
|
@ -103,7 +109,10 @@ OpusDataDecoder::Init()
|
|||
}
|
||||
|
||||
return r == OPUS_OK ? InitPromise::CreateAndResolve(TrackInfo::kAudioTrack, __func__)
|
||||
: InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
|
||||
: InitPromise::CreateAndReject(
|
||||
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("could not create opus multistream decoder!")),
|
||||
__func__);
|
||||
}
|
||||
|
||||
nsresult
|
||||
|
|
|
@ -82,23 +82,34 @@ TheoraDecoder::Init()
|
|||
if (!XiphExtradataToHeaders(headers, headerLens,
|
||||
mInfo.mCodecSpecificConfig->Elements(),
|
||||
mInfo.mCodecSpecificConfig->Length())) {
|
||||
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
|
||||
return InitPromise::CreateAndReject(
|
||||
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("Could not get theora header.")),
|
||||
__func__);
|
||||
}
|
||||
for (size_t i = 0; i < headers.Length(); i++) {
|
||||
if (NS_FAILED(DoDecodeHeader(headers[i], headerLens[i]))) {
|
||||
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
return InitPromise::CreateAndReject(
|
||||
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("Could not decode theora header.")),
|
||||
__func__);
|
||||
}
|
||||
}
|
||||
if (mPacketCount != 3) {
|
||||
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
|
||||
return InitPromise::CreateAndReject(
|
||||
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("Packet count is wrong.")),
|
||||
__func__);
|
||||
}
|
||||
|
||||
mTheoraDecoderContext = th_decode_alloc(&mTheoraInfo, mTheoraSetupInfo);
|
||||
if (mTheoraDecoderContext) {
|
||||
return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
|
||||
} else {
|
||||
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
|
||||
return InitPromise::CreateAndReject(
|
||||
MediaResult(NS_ERROR_OUT_OF_MEMORY,
|
||||
RESULT_DETAIL("Could not allocate theora decoder.")),
|
||||
__func__);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -75,11 +75,16 @@ VorbisDataDecoder::Init()
|
|||
if (!XiphExtradataToHeaders(headers, headerLens,
|
||||
mInfo.mCodecSpecificConfig->Elements(),
|
||||
mInfo.mCodecSpecificConfig->Length())) {
|
||||
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
|
||||
return InitPromise::CreateAndReject(
|
||||
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("Could not get vorbis header.")),
|
||||
__func__);
|
||||
}
|
||||
for (size_t i = 0; i < headers.Length(); i++) {
|
||||
if (NS_FAILED(DecodeHeader(headers[i], headerLens[i]))) {
|
||||
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
return InitPromise::CreateAndReject(
|
||||
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("Could not decode vorbis header.")),
|
||||
__func__);
|
||||
}
|
||||
}
|
||||
|
@ -88,12 +93,18 @@ VorbisDataDecoder::Init()
|
|||
|
||||
int r = vorbis_synthesis_init(&mVorbisDsp, &mVorbisInfo);
|
||||
if (r) {
|
||||
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
|
||||
return InitPromise::CreateAndReject(
|
||||
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("Systhesis init fail.")),
|
||||
__func__);
|
||||
}
|
||||
|
||||
r = vorbis_block_init(&mVorbisDsp, &mVorbisBlock);
|
||||
if (r) {
|
||||
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
|
||||
return InitPromise::CreateAndReject(
|
||||
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("Block init fail.")),
|
||||
__func__);
|
||||
}
|
||||
|
||||
if (mInfo.mRate != (uint32_t)mVorbisDsp.vi->rate) {
|
||||
|
@ -107,7 +118,10 @@ VorbisDataDecoder::Init()
|
|||
|
||||
AudioConfig::ChannelLayout layout(mVorbisDsp.vi->channels);
|
||||
if (!layout.IsValid()) {
|
||||
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
|
||||
return InitPromise::CreateAndReject(
|
||||
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("Invalid audio layout.")),
|
||||
__func__);
|
||||
}
|
||||
|
||||
return InitPromise::CreateAndResolve(TrackInfo::kAudioTrack, __func__);
|
||||
|
|
|
@ -4,11 +4,11 @@
|
|||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#include "AppleATDecoder.h"
|
||||
#include "AppleUtils.h"
|
||||
#include "MP4Decoder.h"
|
||||
#include "mp4_demuxer/Adts.h"
|
||||
#include "MediaInfo.h"
|
||||
#include "AppleATDecoder.h"
|
||||
#include "mozilla/Logging.h"
|
||||
#include "mozilla/SyncRunnable.h"
|
||||
#include "mozilla/UniquePtr.h"
|
||||
|
@ -55,8 +55,10 @@ RefPtr<MediaDataDecoder::InitPromise>
|
|||
AppleATDecoder::Init()
|
||||
{
|
||||
if (!mFormatID) {
|
||||
NS_ERROR("Non recognised format");
|
||||
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
|
||||
return InitPromise::CreateAndReject(
|
||||
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("Non recognised format")),
|
||||
__func__);
|
||||
}
|
||||
|
||||
return InitPromise::CreateAndResolve(TrackType::kAudioTrack, __func__);
|
||||
|
|
|
@ -6,10 +6,10 @@
|
|||
|
||||
#include <CoreFoundation/CFString.h>
|
||||
|
||||
#include "AppleVTDecoder.h"
|
||||
#include "AppleCMLinker.h"
|
||||
#include "AppleDecoderModule.h"
|
||||
#include "AppleUtils.h"
|
||||
#include "AppleVTDecoder.h"
|
||||
#include "AppleVTLinker.h"
|
||||
#include "MediaData.h"
|
||||
#include "mozilla/ArrayUtils.h"
|
||||
|
@ -63,13 +63,13 @@ AppleVTDecoder::~AppleVTDecoder()
|
|||
RefPtr<MediaDataDecoder::InitPromise>
|
||||
AppleVTDecoder::Init()
|
||||
{
|
||||
nsresult rv = InitializeSession();
|
||||
MediaResult rv = InitializeSession();
|
||||
|
||||
if (NS_SUCCEEDED(rv)) {
|
||||
return InitPromise::CreateAndResolve(TrackType::kVideoTrack, __func__);
|
||||
}
|
||||
|
||||
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
|
||||
return InitPromise::CreateAndReject(rv, __func__);
|
||||
}
|
||||
|
||||
RefPtr<MediaDataDecoder::DecodePromise>
|
||||
|
@ -473,7 +473,7 @@ AppleVTDecoder::WaitForAsynchronousFrames()
|
|||
return NS_OK;
|
||||
}
|
||||
|
||||
nsresult
|
||||
MediaResult
|
||||
AppleVTDecoder::InitializeSession()
|
||||
{
|
||||
OSStatus rv;
|
||||
|
@ -487,8 +487,8 @@ AppleVTDecoder::InitializeSession()
|
|||
extensions,
|
||||
&mFormat);
|
||||
if (rv != noErr) {
|
||||
NS_ERROR("Couldn't create format description!");
|
||||
return NS_ERROR_FAILURE;
|
||||
return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("Couldn't create format description!"));
|
||||
}
|
||||
|
||||
// Contruct video decoder selection spec.
|
||||
|
@ -507,8 +507,8 @@ AppleVTDecoder::InitializeSession()
|
|||
&mSession);
|
||||
|
||||
if (rv != noErr) {
|
||||
NS_ERROR("Couldn't create decompression session!");
|
||||
return NS_ERROR_FAILURE;
|
||||
return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("Couldn't create decompression session!"));
|
||||
}
|
||||
|
||||
if (AppleVTLinker::skPropUsingHWAccel) {
|
||||
|
|
|
@ -92,7 +92,7 @@ private:
|
|||
const uint32_t mDisplayHeight;
|
||||
|
||||
// Method to set up the decompression session.
|
||||
nsresult InitializeSession();
|
||||
MediaResult InitializeSession();
|
||||
nsresult WaitForAsynchronousFrames();
|
||||
CFDictionaryRef CreateDecoderSpecification();
|
||||
CFDictionaryRef CreateDecoderExtensions();
|
||||
|
|
|
@ -30,12 +30,11 @@ FFmpegAudioDecoder<LIBAV_VER>::FFmpegAudioDecoder(FFmpegLibWrapper* aLib,
|
|||
RefPtr<MediaDataDecoder::InitPromise>
|
||||
FFmpegAudioDecoder<LIBAV_VER>::Init()
|
||||
{
|
||||
nsresult rv = InitDecoder();
|
||||
MediaResult rv = InitDecoder();
|
||||
|
||||
return rv == NS_OK
|
||||
return NS_SUCCEEDED(rv)
|
||||
? InitPromise::CreateAndResolve(TrackInfo::kAudioTrack, __func__)
|
||||
: InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
__func__);
|
||||
: InitPromise::CreateAndReject(rv, __func__);
|
||||
}
|
||||
|
||||
void
|
||||
|
|
|
@ -38,22 +38,22 @@ FFmpegDataDecoder<LIBAV_VER>::~FFmpegDataDecoder()
|
|||
MOZ_COUNT_DTOR(FFmpegDataDecoder);
|
||||
}
|
||||
|
||||
nsresult
|
||||
MediaResult
|
||||
FFmpegDataDecoder<LIBAV_VER>::InitDecoder()
|
||||
{
|
||||
FFMPEG_LOG("Initialising FFmpeg decoder.");
|
||||
|
||||
AVCodec* codec = FindAVCodec(mLib, mCodecID);
|
||||
if (!codec) {
|
||||
NS_WARNING("Couldn't find ffmpeg decoder");
|
||||
return NS_ERROR_FAILURE;
|
||||
return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("Couldn't find ffmpeg decoder"));
|
||||
}
|
||||
|
||||
StaticMutexAutoLock mon(sMonitor);
|
||||
|
||||
if (!(mCodecContext = mLib->avcodec_alloc_context3(codec))) {
|
||||
NS_WARNING("Couldn't init ffmpeg context");
|
||||
return NS_ERROR_FAILURE;
|
||||
return MediaResult(NS_ERROR_OUT_OF_MEMORY,
|
||||
RESULT_DETAIL("Couldn't init ffmpeg context"));
|
||||
}
|
||||
|
||||
mCodecContext->opaque = this;
|
||||
|
@ -75,10 +75,10 @@ FFmpegDataDecoder<LIBAV_VER>::InitDecoder()
|
|||
}
|
||||
|
||||
if (mLib->avcodec_open2(mCodecContext, codec, nullptr) < 0) {
|
||||
NS_WARNING("Couldn't initialise ffmpeg decoder");
|
||||
mLib->avcodec_close(mCodecContext);
|
||||
mLib->av_freep(&mCodecContext);
|
||||
return NS_ERROR_FAILURE;
|
||||
return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
|
||||
RESULT_DETAIL("Couldn't initialise ffmpeg decoder"));
|
||||
}
|
||||
|
||||
FFMPEG_LOG("FFmpeg init successful.");
|
||||
|
|
|
@ -43,7 +43,7 @@ protected:
|
|||
virtual void ProcessShutdown();
|
||||
virtual void InitCodecContext() { }
|
||||
AVFrame* PrepareFrame();
|
||||
nsresult InitDecoder();
|
||||
MediaResult InitDecoder();
|
||||
|
||||
FFmpegLibWrapper* mLib;
|
||||
|
||||
|
|
|
@ -125,8 +125,9 @@ FFmpegVideoDecoder<LIBAV_VER>::FFmpegVideoDecoder(
|
|||
RefPtr<MediaDataDecoder::InitPromise>
|
||||
FFmpegVideoDecoder<LIBAV_VER>::Init()
|
||||
{
|
||||
if (NS_FAILED(InitDecoder())) {
|
||||
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
|
||||
MediaResult rv = InitDecoder();
|
||||
if (NS_FAILED(rv)) {
|
||||
return InitPromise::CreateAndReject(rv, __func__);
|
||||
}
|
||||
|
||||
return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
|
||||
|
|
|
@ -92,16 +92,16 @@ InProcessCompositorSession::Shutdown()
|
|||
// at which point CBP will defer a Release on the compositor thread. We
|
||||
// can safely release our reference now, and let the destructor run on either
|
||||
// thread.
|
||||
mCompositorBridgeChild->Destroy();
|
||||
mCompositorBridgeChild = nullptr;
|
||||
mCompositorBridgeParent = nullptr;
|
||||
mCompositorWidget = nullptr;
|
||||
#if defined(MOZ_WIDGET_ANDROID)
|
||||
if (mUiCompositorControllerChild) {
|
||||
mUiCompositorControllerChild->Destroy();
|
||||
mUiCompositorControllerChild = nullptr;
|
||||
}
|
||||
#endif //defined(MOZ_WIDGET_ANDROID)
|
||||
mCompositorBridgeChild->Destroy();
|
||||
mCompositorBridgeChild = nullptr;
|
||||
mCompositorBridgeParent = nullptr;
|
||||
mCompositorWidget = nullptr;
|
||||
GPUProcessManager::Get()->UnregisterInProcessSession(this);
|
||||
}
|
||||
|
||||
|
|
|
@ -2999,10 +2999,14 @@ void AsyncPanZoomController::AdjustScrollForSurfaceShift(const ScreenPoint& aShi
|
|||
/ mFrameMetrics.GetZoom();
|
||||
APZC_LOG("%p adjusting scroll position by %s for surface shift\n",
|
||||
this, Stringify(adjustment).c_str());
|
||||
CSSPoint scrollOffset = mFrameMetrics.GetScrollOffset();
|
||||
scrollOffset.y = mY.ClampOriginToScrollableRect(scrollOffset.y + adjustment.y);
|
||||
scrollOffset.x = mX.ClampOriginToScrollableRect(scrollOffset.x + adjustment.x);
|
||||
mFrameMetrics.SetScrollOffset(scrollOffset);
|
||||
CSSRect scrollRange = mFrameMetrics.CalculateScrollRange();
|
||||
// Apply shift to mFrameMetrics.mScrollOffset.
|
||||
mFrameMetrics.SetScrollOffset(scrollRange.ClampPoint(
|
||||
mFrameMetrics.GetScrollOffset() + adjustment));
|
||||
// Apply shift to mCompositedScrollOffset, since the dynamic toolbar expects
|
||||
// the shift to take effect right away, without the usual frame delay.
|
||||
mCompositedScrollOffset = scrollRange.ClampPoint(
|
||||
mCompositedScrollOffset + adjustment);
|
||||
RequestContentRepaint();
|
||||
UpdateSharedCompositorFrameMetrics();
|
||||
}
|
||||
|
|
|
@ -1059,7 +1059,16 @@ private:
|
|||
void
|
||||
LayerManagerComposite::RenderToPresentationSurface()
|
||||
{
|
||||
if (!mCompositor) {
|
||||
return;
|
||||
}
|
||||
|
||||
widget::CompositorWidget* const widget = mCompositor->GetWidget();
|
||||
|
||||
if (!widget) {
|
||||
return;
|
||||
}
|
||||
|
||||
ANativeWindow* window = widget->AsAndroid()->GetPresentationANativeWindow();
|
||||
|
||||
if (!window) {
|
||||
|
|
|
@ -198,7 +198,7 @@ UiCompositorControllerChild::Destroy()
|
|||
NewRunnableMethod("layers::UiCompositorControllerChild::Destroy",
|
||||
this,
|
||||
&UiCompositorControllerChild::Destroy),
|
||||
nsIThread::DISPATCH_NORMAL);
|
||||
nsIThread::DISPATCH_SYNC);
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
|
@ -14,9 +14,6 @@ namespace layers {
|
|||
|
||||
StackingContextHelper::StackingContextHelper()
|
||||
: mBuilder(nullptr)
|
||||
, mHasPerspectiveTransform(false)
|
||||
, mXScale(1.0f)
|
||||
, mYScale(1.0f)
|
||||
{
|
||||
// mOrigin remains at 0,0
|
||||
}
|
||||
|
@ -27,9 +24,6 @@ StackingContextHelper::StackingContextHelper(const StackingContextHelper& aParen
|
|||
const Maybe<gfx::Matrix4x4>& aTransform,
|
||||
const nsTArray<wr::WrFilterOp>& aFilters)
|
||||
: mBuilder(&aBuilder)
|
||||
, mHasPerspectiveTransform(false)
|
||||
, mXScale(1.0f)
|
||||
, mYScale(1.0f)
|
||||
{
|
||||
wr::LayoutRect scBounds = aParentSC.ToRelativeLayoutRect(aLayer->BoundsForStackingContext());
|
||||
Layer* layer = aLayer->GetLayer();
|
||||
|
@ -53,9 +47,6 @@ StackingContextHelper::StackingContextHelper(const StackingContextHelper& aParen
|
|||
gfx::Matrix4x4* aTransformPtr,
|
||||
const nsTArray<wr::WrFilterOp>& aFilters)
|
||||
: mBuilder(&aBuilder)
|
||||
, mHasPerspectiveTransform(false)
|
||||
, mXScale(1.0f)
|
||||
, mYScale(1.0f)
|
||||
{
|
||||
wr::LayoutRect scBounds = aParentSC.ToRelativeLayoutRect(aLayer->BoundsForStackingContext());
|
||||
if (aTransformPtr) {
|
||||
|
@ -86,70 +77,20 @@ StackingContextHelper::StackingContextHelper(const StackingContextHelper& aParen
|
|||
const nsTArray<wr::WrFilterOp>& aFilters,
|
||||
const gfx::CompositionOp& aMixBlendMode)
|
||||
: mBuilder(&aBuilder)
|
||||
, mHasPerspectiveTransform(false)
|
||||
, mXScale(1.0f)
|
||||
, mYScale(1.0f)
|
||||
{
|
||||
nsRect visibleRect;
|
||||
|
||||
bool is2d = !aTransformPtr || (aTransformPtr->Is2D() && !aPerspectivePtr);
|
||||
if (aTransformPtr) {
|
||||
mTransform = *aTransformPtr;
|
||||
}
|
||||
|
||||
// Apply the inherited scale from parent
|
||||
mTransform.PostScale(aParentSC.mXScale, aParentSC.mYScale, 1.0);
|
||||
mTransform.NudgeToIntegersFixedEpsilon();
|
||||
|
||||
if (aPerspectivePtr) {
|
||||
mHasPerspectiveTransform = true;
|
||||
}
|
||||
|
||||
bool is2d = !aTransformPtr || (aTransformPtr->Is2D() && !aPerspectivePtr);
|
||||
if (is2d) {
|
||||
nsRect itemBounds = aDisplayList->GetClippedBoundsWithRespectToASR(aDisplayListBuilder, aItem->GetActiveScrolledRoot());
|
||||
nsRect childrenVisible = aItem->GetVisibleRectForChildren();
|
||||
visibleRect = itemBounds.Intersect(childrenVisible);
|
||||
|
||||
// Calculate the correct scale for current stacking context
|
||||
gfx::Size scale = mTransform.As2D().ScaleFactors(true);
|
||||
|
||||
// Restore the scale to default if the scale is too small
|
||||
if (FuzzyEqualsAdditive(scale.width, 0.0f) ||
|
||||
FuzzyEqualsAdditive(scale.height, 0.0f)) {
|
||||
scale = gfx::Size(1.0f, 1.0f);
|
||||
}
|
||||
|
||||
mTransform.PreScale(1.0f/scale.width, 1.0f/scale.height, 1.0);
|
||||
|
||||
// Store the inherited scale for child
|
||||
this->mXScale = scale.width;
|
||||
this->mYScale = scale.height;
|
||||
} else {
|
||||
visibleRect = aDisplayList->GetBounds(aDisplayListBuilder);
|
||||
// The position of bounds are calculated by transform and perspective matrix in 3d case. reset it to (0, 0)
|
||||
visibleRect.MoveTo(0, 0);
|
||||
}
|
||||
float appUnitsPerDevPixel = aItem->Frame()->PresContext()->AppUnitsPerDevPixel();
|
||||
LayerRect bounds = ViewAs<LayerPixel>(LayoutDeviceRect::FromAppUnits(visibleRect, appUnitsPerDevPixel),
|
||||
PixelCastJustification::WebRenderHasUnitResolution);
|
||||
|
||||
// WR will only apply the 'translate' of the transform, so we need to do the scale/rotation manually.
|
||||
if (aBoundTransform && !aBoundTransform->IsIdentity() && is2d) {
|
||||
bounds.MoveTo(aBoundTransform->TransformPoint(bounds.TopLeft()));
|
||||
}
|
||||
|
||||
wr::LayoutRect scBounds = aParentSC.ToRelativeLayoutRect(bounds);
|
||||
|
||||
mBuilder->PushStackingContext(scBounds,
|
||||
mBuilder->PushStackingContext(wr::LayoutRect(),
|
||||
aAnimationsId,
|
||||
aOpacityPtr,
|
||||
aTransformPtr ? &mTransform : aTransformPtr,
|
||||
aTransformPtr,
|
||||
is2d ? wr::TransformStyle::Flat : wr::TransformStyle::Preserve3D,
|
||||
aPerspectivePtr,
|
||||
wr::ToMixBlendMode(aMixBlendMode),
|
||||
aFilters);
|
||||
|
||||
mOrigin = bounds.TopLeft();
|
||||
}
|
||||
|
||||
StackingContextHelper::~StackingContextHelper()
|
||||
|
@ -162,25 +103,13 @@ StackingContextHelper::~StackingContextHelper()
|
|||
wr::LayoutRect
|
||||
StackingContextHelper::ToRelativeLayoutRect(const LayerRect& aRect) const
|
||||
{
|
||||
// Multiply by the scale inherited from ancestors if exits
|
||||
LayerRect aMaybeScaledRect = aRect;
|
||||
if (mXScale != 1.0f || mYScale != 1.0f) {
|
||||
aMaybeScaledRect.Scale(mXScale, mYScale);
|
||||
}
|
||||
|
||||
return wr::ToLayoutRect(RoundedToInt(aMaybeScaledRect - mOrigin));
|
||||
return wr::ToLayoutRect(RoundedToInt(aRect - mOrigin));
|
||||
}
|
||||
|
||||
wr::LayoutRect
|
||||
StackingContextHelper::ToRelativeLayoutRect(const LayoutDeviceRect& aRect) const
|
||||
{
|
||||
// Multiply by the scale inherited from ancestors if exits
|
||||
LayoutDeviceRect aMaybeScaledRect = aRect;
|
||||
if (mXScale != 1.0f || mYScale != 1.0f) {
|
||||
aMaybeScaledRect.Scale(mXScale, mYScale);
|
||||
}
|
||||
|
||||
return wr::ToLayoutRect(RoundedToInt(ViewAs<LayerPixel>(aMaybeScaledRect,
|
||||
return wr::ToLayoutRect(RoundedToInt(ViewAs<LayerPixel>(aRect,
|
||||
PixelCastJustification::WebRenderHasUnitResolution) - mOrigin));
|
||||
}
|
||||
|
||||
|
|
|
@ -83,28 +83,12 @@ public:
|
|||
// Same but for points
|
||||
wr::LayoutPoint ToRelativeLayoutPoint(const LayerPoint& aPoint) const;
|
||||
|
||||
// Export the inherited scale
|
||||
gfx::Size GetInheritedScale() const {
|
||||
return gfx::Size(mXScale, mYScale);
|
||||
}
|
||||
|
||||
// Provide interface to setup the inherited scale to support
|
||||
// special cases, like OMTA
|
||||
void SetInheritedScale(const gfx::Size& aScale) {
|
||||
mXScale = aScale.width;
|
||||
mYScale = aScale.height;
|
||||
}
|
||||
|
||||
bool IsBackfaceVisible() const { return mTransform.IsBackfaceVisible(); }
|
||||
bool HasPerspectiveTransform() const { return mHasPerspectiveTransform; }
|
||||
|
||||
private:
|
||||
wr::DisplayListBuilder* mBuilder;
|
||||
LayerPoint mOrigin;
|
||||
gfx::Matrix4x4 mTransform;
|
||||
bool mHasPerspectiveTransform;
|
||||
float mXScale;
|
||||
float mYScale;
|
||||
};
|
||||
|
||||
} // namespace layers
|
||||
|
|
|
@ -5529,20 +5529,34 @@ nsDisplayBoxShadowInner::CanCreateWebRenderCommands(nsDisplayListBuilder* aBuild
|
|||
nsIFrame* aFrame,
|
||||
nsPoint aReferenceOffset)
|
||||
{
|
||||
nsRect borderRect = nsRect(aReferenceOffset, aFrame->GetSize());
|
||||
RectCornerRadii innerRadii;
|
||||
bool hasBorderRadius =
|
||||
nsCSSRendering::GetShadowInnerRadii(aFrame, borderRect, innerRadii);
|
||||
if (hasBorderRadius) {
|
||||
return false;
|
||||
}
|
||||
|
||||
nsCSSShadowArray *shadows = aFrame->StyleEffects()->mBoxShadow;
|
||||
if (!shadows) {
|
||||
// Means we don't have to paint anything
|
||||
return true;
|
||||
}
|
||||
|
||||
bool hasBorderRadius;
|
||||
bool nativeTheme =
|
||||
nsCSSRendering::HasBoxShadowNativeTheme(aFrame, hasBorderRadius);
|
||||
|
||||
// We don't support native themed things yet like box shadows around
|
||||
// input buttons.
|
||||
if (nativeTheme) {
|
||||
return false;
|
||||
}
|
||||
|
||||
nsRect borderRect = nsRect(aReferenceOffset, aFrame->GetSize());
|
||||
RectCornerRadii innerRadii;
|
||||
|
||||
if (hasBorderRadius) {
|
||||
hasBorderRadius =
|
||||
nsCSSRendering::GetShadowInnerRadii(aFrame, borderRect, innerRadii);
|
||||
}
|
||||
|
||||
if (hasBorderRadius && !innerRadii.AreRadiiSame()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -7915,12 +7929,18 @@ nsDisplayTransform::CreateWebRenderCommands(mozilla::wr::DisplayListBuilder& aBu
|
|||
// transform animation, the transform value will be resolved
|
||||
// after animation sampling on the compositor
|
||||
transformForSC = nullptr;
|
||||
|
||||
// Pass default transform to compositor in case gecko fails to
|
||||
// get animated value after animation sampling.
|
||||
OptionalTransform transformForCompositor = newTransformMatrix;
|
||||
|
||||
OpAddCompositorAnimations
|
||||
anim(CompositorAnimations(animationInfo.GetAnimations(), animationsId),
|
||||
transformForCompositor, void_t());
|
||||
aManager->WrBridge()->AddWebRenderParentCommand(anim);
|
||||
}
|
||||
|
||||
gfx::Matrix4x4Typed<LayerPixel, LayerPixel> boundTransform = ViewAs< gfx::Matrix4x4Typed<LayerPixel, LayerPixel> >(newTransformMatrix);
|
||||
boundTransform._41 = 0.0f;
|
||||
boundTransform._42 = 0.0f;
|
||||
boundTransform._43 = 0.0f;
|
||||
gfx::Matrix4x4Typed<LayerPixel, LayerPixel> boundTransform = ViewAs<gfx::Matrix4x4Typed<LayerPixel, LayerPixel>>(newTransformMatrix);
|
||||
|
||||
nsTArray<mozilla::wr::WrFilterOp> filters;
|
||||
StackingContextHelper sc(aSc,
|
||||
|
@ -7935,33 +7955,6 @@ nsDisplayTransform::CreateWebRenderCommands(mozilla::wr::DisplayListBuilder& aBu
|
|||
nullptr,
|
||||
filters);
|
||||
|
||||
if (animationsId) {
|
||||
// Get the inheritedScale from parent and pass the scale to compositor
|
||||
// to get correct sampling result
|
||||
gfx::Size scale = aSc.GetInheritedScale();
|
||||
for (layers::Animation& animation : animationInfo.GetAnimations()) {
|
||||
if (animation.property() == eCSSProperty_transform) {
|
||||
TransformData& transformData = animation.data().get_TransformData();
|
||||
transformData.inheritedXScale() = scale.width;
|
||||
transformData.inheritedYScale() = scale.height;
|
||||
transformData.hasPerspectiveParent() = aSc.HasPerspectiveTransform();
|
||||
}
|
||||
}
|
||||
|
||||
// Pass default transform to compositor in case gecko fails to
|
||||
// get animated value after animation sampling.
|
||||
OptionalTransform transformForCompositor = newTransformMatrix;
|
||||
OpAddCompositorAnimations
|
||||
anim(CompositorAnimations(animationInfo.GetAnimations(), animationsId),
|
||||
transformForCompositor, void_t());
|
||||
aManager->WrBridge()->AddWebRenderParentCommand(anim);
|
||||
|
||||
// Since we passed a nullptr transformForSC to the StackingContextHelper,
|
||||
// we now set up the correct inherited scale for the stacking context.
|
||||
newTransformMatrix.PostScale(scale.width, scale.height, 1.0f);
|
||||
sc.SetInheritedScale(newTransformMatrix.As2D().ScaleFactors(true));
|
||||
|
||||
}
|
||||
return mStoredList.CreateWebRenderCommands(aBuilder, aResources, sc, aParentCommands,
|
||||
aManager, aDisplayListBuilder);
|
||||
}
|
||||
|
|
|
@ -43,10 +43,10 @@ skip-if(!asyncPan) == position-fixed-inside-sticky-2.html position-fixed-inside-
|
|||
fuzzy(1,60000) skip-if(!asyncPan) == group-opacity-surface-size-1.html group-opacity-surface-size-1-ref.html
|
||||
fails-if(webrender) skip-if(!asyncPan) == position-sticky-transformed.html position-sticky-transformed-ref.html # bug 1366295 for webrender
|
||||
skip-if(!asyncPan) == offscreen-prerendered-active-opacity.html offscreen-prerendered-active-opacity-ref.html
|
||||
fuzzy-if(Android,6,4) fuzzy-if(skiaContent&&!Android,1,34) skip-if(!asyncPan) fuzzy-if(webrender,128-128,10-10) == offscreen-clipped-blendmode-1.html offscreen-clipped-blendmode-ref.html
|
||||
fuzzy-if(Android,6,4) fuzzy-if(webrender,128-128,10-10) skip-if(!asyncPan) == offscreen-clipped-blendmode-2.html offscreen-clipped-blendmode-ref.html
|
||||
fuzzy-if(Android,6,4) fuzzy-if(skiaContent&&!Android,1,34) skip-if(!asyncPan) == offscreen-clipped-blendmode-1.html offscreen-clipped-blendmode-ref.html
|
||||
fuzzy-if(Android,6,4) skip-if(!asyncPan) == offscreen-clipped-blendmode-2.html offscreen-clipped-blendmode-ref.html
|
||||
fuzzy-if(Android,6,4) skip == offscreen-clipped-blendmode-3.html offscreen-clipped-blendmode-ref.html # bug 1251588 - wrong AGR on mix-blend-mode item
|
||||
fuzzy-if(Android,6,4) fuzzy-if(webrender,128-128,10-10) skip-if(!asyncPan) == offscreen-clipped-blendmode-4.html offscreen-clipped-blendmode-ref.html
|
||||
fuzzy-if(Android,6,4) skip-if(!asyncPan) == offscreen-clipped-blendmode-4.html offscreen-clipped-blendmode-ref.html
|
||||
fuzzy-if(Android,7,4) skip-if(!asyncPan) == perspective-scrolling-1.html perspective-scrolling-1-ref.html
|
||||
fuzzy-if(Android,7,4) skip-if(!asyncPan) == perspective-scrolling-2.html perspective-scrolling-2-ref.html
|
||||
fuzzy-if(Android,7,4) fails-if(webrender) skip-if(!asyncPan) == perspective-scrolling-3.html perspective-scrolling-3-ref.html # bug 1361720 for webrender
|
||||
|
|
|
@ -14,7 +14,7 @@ fails-if(Android) fuzzy-if(webrender,50,3310) == boxshadow-button.html boxshadow
|
|||
fuzzy-if(OSX==1010,1,24) fuzzy-if(d2d,16,908) fuzzy-if(webrender,18,2160) == boxshadow-large-border-radius.html boxshadow-large-border-radius-ref.html # Bug 1209649
|
||||
|
||||
fails-if(Android) == boxshadow-fileupload.html boxshadow-fileupload-ref.html
|
||||
fuzzy-if(skiaContent,13,28) == boxshadow-inner-basic.html boxshadow-inner-basic-ref.svg
|
||||
fuzzy-if(skiaContent,13,28) fuzzy-if(webrender,29-29,450-450) == boxshadow-inner-basic.html boxshadow-inner-basic-ref.svg
|
||||
random-if(layersGPUAccelerated) == boxshadow-mixed.html boxshadow-mixed-ref.html
|
||||
random-if(d2d) fuzzy-if(skiaContent,1,100) fuzzy-if(webrender,127,3528) == boxshadow-rounded-spread.html boxshadow-rounded-spread-ref.html
|
||||
fuzzy-if(skiaContent,1,50) HTTP(..) == boxshadow-dynamic.xul boxshadow-dynamic-ref.xul
|
||||
|
@ -25,7 +25,7 @@ fuzzy(2,440) fuzzy-if(webrender,25,1300) == boxshadow-skiprect.html boxshadow-sk
|
|||
== boxshadow-opacity.html boxshadow-opacity-ref.html
|
||||
== boxshadow-color-rounding.html boxshadow-color-rounding-ref.html
|
||||
== boxshadow-color-rounding-middle.html boxshadow-color-rounding-middle-ref.html
|
||||
fuzzy(3,500) fuzzy-if(d2d,2,1080) fuzzy-if(webrender,12,1500) == boxshadow-border-radius-int.html boxshadow-border-radius-int-ref.html
|
||||
fuzzy(3,500) fuzzy-if(d2d,2,1080) fuzzy-if(webrender,13-13,1000-1000) == boxshadow-border-radius-int.html boxshadow-border-radius-int-ref.html
|
||||
fuzzy-if(webrender,1,4) == boxshadow-inset-neg-spread.html about:blank
|
||||
== boxshadow-inset-neg-spread2.html boxshadow-inset-neg-spread2-ref.html
|
||||
fuzzy(26,3610) fuzzy-if(d2d,26,5910) fuzzy-if(webrender,43,200) == boxshadow-rotated.html boxshadow-rotated-ref.html # Bug 1211264
|
||||
|
@ -40,7 +40,7 @@ fuzzy(13,9445) fuzzy-if(d2d,13,10926) fuzzy-if(webrender,14,14307) == boxshadow-
|
|||
fuzzy-if(webrender,1,655) == 611574-1.html 611574-1-ref.html
|
||||
fuzzy-if(webrender,4,144) == 611574-2.html 611574-2-ref.html
|
||||
fuzzy-if(winWidget,5,30) fuzzy-if(skiaContent,16,10) fuzzy-if(webrender,162-162,120-120) == fieldset.html fieldset-ref.html # minor anti-aliasing problem on Windows
|
||||
fuzzy-if(winWidget,5,30) fuzzy-if(skiaContent,16,10) fuzzy-if(webrender,165-165,120-120) == fieldset-inset.html fieldset-inset-ref.html # minor anti-aliasing problem on Windows
|
||||
fuzzy-if(winWidget,5,30) fuzzy-if(skiaContent,16,10) fails-if(webrender) == fieldset-inset.html fieldset-inset-ref.html # minor anti-aliasing problem on Windows
|
||||
== 1178575.html 1178575-ref.html
|
||||
== 1178575-2.html 1178575-2-ref.html
|
||||
fuzzy(159,2) fails-if(!dwrite) == 1212823-1.html 1212823-1-ref.html
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
default-preferences pref(layout.css.box-decoration-break.enabled,true)
|
||||
|
||||
== box-decoration-break-1.html box-decoration-break-1-ref.html
|
||||
fuzzy(1,20) fuzzy-if(skiaContent,1,700) fuzzy-if(webrender,4-4,36-36) == box-decoration-break-with-inset-box-shadow-1.html box-decoration-break-with-inset-box-shadow-1-ref.html
|
||||
fuzzy(1,20) fuzzy-if(skiaContent,1,700) fuzzy-if(webrender,8-8,242-242) == box-decoration-break-with-inset-box-shadow-1.html box-decoration-break-with-inset-box-shadow-1-ref.html
|
||||
fuzzy(45,460) fuzzy-if(skiaContent,57,439) fuzzy-if(Android,57,1330) fuzzy-if(styloVsGecko,45,1410) == box-decoration-break-with-outset-box-shadow-1.html box-decoration-break-with-outset-box-shadow-1-ref.html # Bug 1386543
|
||||
random-if(!gtkWidget) HTTP(..) == box-decoration-break-border-image.html box-decoration-break-border-image-ref.html
|
||||
== box-decoration-break-block-border-padding.html box-decoration-break-block-border-padding-ref.html
|
||||
|
|
|
@ -181,7 +181,8 @@ HTTP(..) == reflow-sanity-delay-1c.html reflow-sanity-1-ref.html
|
|||
HTTP(..) == reflow-sanity-delay-1-metrics.html reflow-sanity-1-ref.html
|
||||
|
||||
# font-display
|
||||
pref(layout.css.font-display.enabled,true) HTTP(..) == font-display-1.html font-display-1-ref.html # normal font load (~500ms)
|
||||
skip-if(/^Linux\x20i686/.test(http.oscpu)) pref(layout.css.font-display.enabled,true) HTTP(..) == font-display-1.html font-display-1-ref.html # normal font load (~500ms)
|
||||
# ^ disabled due to intermittents due to timing issues -- Bug 1238222
|
||||
pref(layout.css.font-display.enabled,true) fuzzy-if(OSX==1010,3,5) HTTP(..) == font-display-2.html font-display-2-ref.html # font load takes 4500ms
|
||||
|
||||
# Testing hack for Meiryo
|
||||
|
|
|
@ -1525,6 +1525,15 @@ Gecko_ImageValue_Create(ServoBundledURI aURI, ServoRawOffsetArc<RustString> aURI
|
|||
return value.forget().take();
|
||||
}
|
||||
|
||||
MOZ_DEFINE_MALLOC_SIZE_OF(GeckoImageValueMallocSizeOf)
|
||||
|
||||
size_t
|
||||
Gecko_ImageValue_SizeOfIncludingThis(mozilla::css::ImageValue* aImageValue)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
return aImageValue->SizeOfIncludingThis(GeckoImageValueMallocSizeOf);
|
||||
}
|
||||
|
||||
void
|
||||
Gecko_SetLayerImageImageValue(nsStyleImage* aImage,
|
||||
mozilla::css::ImageValue* aImageValue)
|
||||
|
|
|
@ -344,6 +344,7 @@ void Gecko_SetGradientImageValue(nsStyleImage* image, nsStyleGradient* gradient)
|
|||
NS_DECL_THREADSAFE_FFI_REFCOUNTING(mozilla::css::ImageValue, ImageValue);
|
||||
mozilla::css::ImageValue* Gecko_ImageValue_Create(ServoBundledURI aURI,
|
||||
mozilla::ServoRawOffsetArc<RustString> aURIString);
|
||||
size_t Gecko_ImageValue_SizeOfIncludingThis(mozilla::css::ImageValue* aImageValue);
|
||||
void Gecko_SetLayerImageImageValue(nsStyleImage* image,
|
||||
mozilla::css::ImageValue* aImageValue);
|
||||
|
||||
|
|
|
@ -2149,8 +2149,7 @@ nsCSSValue::SizeOfExcludingThis(mozilla::MallocSizeOf aMallocSizeOf) const
|
|||
|
||||
// Image
|
||||
case eCSSUnit_Image:
|
||||
// Not yet measured. Measurement may be added later if DMD finds it
|
||||
// worthwhile.
|
||||
n += mValue.mImage->SizeOfIncludingThis(aMallocSizeOf);
|
||||
break;
|
||||
|
||||
// Gradient
|
||||
|
@ -3205,6 +3204,15 @@ css::ImageValue::~ImageValue()
|
|||
}
|
||||
}
|
||||
|
||||
size_t
|
||||
css::ImageValue::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
|
||||
{
|
||||
size_t n = aMallocSizeOf(this);
|
||||
n += css::URLValueData::SizeOfExcludingThis(aMallocSizeOf);
|
||||
n += mRequests.ShallowSizeOfExcludingThis(aMallocSizeOf);
|
||||
return n;
|
||||
}
|
||||
|
||||
size_t
|
||||
css::ComplexColorValue::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
|
||||
{
|
||||
|
|
|
@ -262,7 +262,7 @@ struct ImageValue final : public URLValueData
|
|||
|
||||
void Initialize(nsIDocument* aDocument);
|
||||
|
||||
// XXXheycam We should have our own SizeOfIncludingThis method.
|
||||
size_t SizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const;
|
||||
|
||||
protected:
|
||||
~ImageValue();
|
||||
|
|
|
@ -391,6 +391,19 @@ function run() {
|
|||
should_not_apply("(aspect-ratio: 5900/7999)");
|
||||
should_apply("(aspect-ratio)");
|
||||
|
||||
// Test "unreasonable", but still valid aspect ratios, such as aspect ratios with negative numbers,
|
||||
// and zeros, and with numbers near 2^32 and 2^64 (to check overflow).
|
||||
should_not_apply("(aspect-ratio: 0/1)");
|
||||
should_not_apply("(aspect-ratio: 1/0)");
|
||||
should_not_apply("(aspect-ratio: -1/1)");
|
||||
should_not_apply("(aspect-ratio: 1/-1)");
|
||||
should_not_apply("(aspect-ratio: -1/-1)");
|
||||
should_not_apply("(aspect-ratio: -59/-80)");
|
||||
should_not_apply("(aspect-ratio: 4294967295/4294967295)");
|
||||
should_not_apply("(aspect-ratio: 4294967297/4294967297)");
|
||||
should_not_apply("(aspect-ratio: 18446744073709560000/18446744073709560000)");
|
||||
|
||||
// Test min and max aspect ratios.
|
||||
should_apply("(min-aspect-ratio: 59/80)");
|
||||
should_apply("(min-aspect-ratio: 58/80)");
|
||||
should_apply("(min-aspect-ratio: 59/81)");
|
||||
|
|
|
@ -574,8 +574,11 @@ pref("apz.second_tap_tolerance", "0.3");
|
|||
pref("apz.touch_move_tolerance", "0.03");
|
||||
pref("apz.touch_start_tolerance", "0.06");
|
||||
|
||||
// Enabling this on Fennec is blocked on a proper fix for bug 1390145.
|
||||
#ifdef NIGHTLY_BUILD
|
||||
pref("apz.frame_delay.enabled", true);
|
||||
#else
|
||||
pref("apz.frame_delay.enabled", false);
|
||||
#endif
|
||||
|
||||
pref("layers.progressive-paint", true);
|
||||
pref("layers.low-precision-buffer", true);
|
||||
|
|
|
@ -15,8 +15,12 @@ import org.mozilla.gecko.widget.themed.ThemedLinearLayout;
|
|||
import org.mozilla.gecko.widget.themed.ThemedTextView;
|
||||
|
||||
import android.content.Context;
|
||||
import android.content.res.ColorStateList;
|
||||
import android.content.res.Resources;
|
||||
import android.graphics.Bitmap;
|
||||
import android.graphics.Color;
|
||||
import android.graphics.drawable.Drawable;
|
||||
import android.support.v4.graphics.drawable.DrawableCompat;
|
||||
import android.support.v4.widget.TextViewCompat;
|
||||
import android.util.AttributeSet;
|
||||
import android.view.LayoutInflater;
|
||||
|
@ -112,6 +116,13 @@ public class TabStripItemView extends ThemedLinearLayout
|
|||
|
||||
this.checked = checked;
|
||||
refreshDrawableState();
|
||||
|
||||
// Tint the close view based on current checked status.
|
||||
final ColorStateList colorStateList = closeView.getDrawableColors();
|
||||
final int tintColor = colorStateList.getColorForState(getDrawableState(), Color.TRANSPARENT);
|
||||
final Drawable drawable = DrawableCompat.wrap(closeView.getDrawable());
|
||||
DrawableCompat.setTint(drawable, tintColor);
|
||||
closeView.setImageDrawable(drawable);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -166,6 +166,10 @@ public class ThemedEditText extends android.widget.EditText
|
|||
return new ColorDrawable(ContextCompat.getColor(getContext(), id));
|
||||
}
|
||||
|
||||
public ColorStateList getDrawableColors() {
|
||||
return drawableColors;
|
||||
}
|
||||
|
||||
protected LightweightTheme getTheme() {
|
||||
return theme;
|
||||
}
|
||||
|
|
|
@ -166,6 +166,10 @@ public class ThemedFrameLayout extends android.widget.FrameLayout
|
|||
return new ColorDrawable(ContextCompat.getColor(getContext(), id));
|
||||
}
|
||||
|
||||
public ColorStateList getDrawableColors() {
|
||||
return drawableColors;
|
||||
}
|
||||
|
||||
protected LightweightTheme getTheme() {
|
||||
return theme;
|
||||
}
|
||||
|
|
|
@ -194,6 +194,10 @@ public class ThemedImageButton extends android.widget.ImageButton
|
|||
return new ColorDrawable(ContextCompat.getColor(getContext(), id));
|
||||
}
|
||||
|
||||
public ColorStateList getDrawableColors() {
|
||||
return drawableColors;
|
||||
}
|
||||
|
||||
protected LightweightTheme getTheme() {
|
||||
return theme;
|
||||
}
|
||||
|
|
|
@ -193,6 +193,10 @@ public class ThemedImageView extends android.widget.ImageView
|
|||
return new ColorDrawable(ContextCompat.getColor(getContext(), id));
|
||||
}
|
||||
|
||||
public ColorStateList getDrawableColors() {
|
||||
return drawableColors;
|
||||
}
|
||||
|
||||
protected LightweightTheme getTheme() {
|
||||
return theme;
|
||||
}
|
||||
|
|
|
@ -161,6 +161,10 @@ public class ThemedLinearLayout extends android.widget.LinearLayout
|
|||
return new ColorDrawable(ContextCompat.getColor(getContext(), id));
|
||||
}
|
||||
|
||||
public ColorStateList getDrawableColors() {
|
||||
return drawableColors;
|
||||
}
|
||||
|
||||
protected LightweightTheme getTheme() {
|
||||
return theme;
|
||||
}
|
||||
|
|
|
@ -166,6 +166,10 @@ public class ThemedListView extends android.widget.ListView
|
|||
return new ColorDrawable(ContextCompat.getColor(getContext(), id));
|
||||
}
|
||||
|
||||
public ColorStateList getDrawableColors() {
|
||||
return drawableColors;
|
||||
}
|
||||
|
||||
protected LightweightTheme getTheme() {
|
||||
return theme;
|
||||
}
|
||||
|
|
|
@ -166,6 +166,10 @@ public class ThemedProgressBar extends android.widget.ProgressBar
|
|||
return new ColorDrawable(ContextCompat.getColor(getContext(), id));
|
||||
}
|
||||
|
||||
public ColorStateList getDrawableColors() {
|
||||
return drawableColors;
|
||||
}
|
||||
|
||||
protected LightweightTheme getTheme() {
|
||||
return theme;
|
||||
}
|
||||
|
|
|
@ -166,6 +166,10 @@ public class ThemedRelativeLayout extends android.widget.RelativeLayout
|
|||
return new ColorDrawable(ContextCompat.getColor(getContext(), id));
|
||||
}
|
||||
|
||||
public ColorStateList getDrawableColors() {
|
||||
return drawableColors;
|
||||
}
|
||||
|
||||
protected LightweightTheme getTheme() {
|
||||
return theme;
|
||||
}
|
||||
|
|
|
@ -161,6 +161,10 @@ public class ThemedTextSwitcher extends android.widget.TextSwitcher
|
|||
return new ColorDrawable(ContextCompat.getColor(getContext(), id));
|
||||
}
|
||||
|
||||
public ColorStateList getDrawableColors() {
|
||||
return drawableColors;
|
||||
}
|
||||
|
||||
protected LightweightTheme getTheme() {
|
||||
return theme;
|
||||
}
|
||||
|
|
|
@ -166,6 +166,10 @@ public class ThemedTextView extends android.widget.TextView
|
|||
return new ColorDrawable(ContextCompat.getColor(getContext(), id));
|
||||
}
|
||||
|
||||
public ColorStateList getDrawableColors() {
|
||||
return drawableColors;
|
||||
}
|
||||
|
||||
protected LightweightTheme getTheme() {
|
||||
return theme;
|
||||
}
|
||||
|
|
|
@ -166,6 +166,10 @@ public class ThemedView extends android.view.View
|
|||
return new ColorDrawable(ContextCompat.getColor(getContext(), id));
|
||||
}
|
||||
|
||||
public ColorStateList getDrawableColors() {
|
||||
return drawableColors;
|
||||
}
|
||||
|
||||
protected LightweightTheme getTheme() {
|
||||
return theme;
|
||||
}
|
||||
|
|
|
@ -205,6 +205,10 @@ public class Themed@VIEW_NAME_SUFFIX@ extends @BASE_TYPE@
|
|||
return new ColorDrawable(ContextCompat.getColor(getContext(), id));
|
||||
}
|
||||
|
||||
public ColorStateList getDrawableColors() {
|
||||
return drawableColors;
|
||||
}
|
||||
|
||||
protected LightweightTheme getTheme() {
|
||||
return theme;
|
||||
}
|
||||
|
|
|
@ -1140,4 +1140,4 @@ static const TransportSecurityPreload kPublicKeyPinningPreloadList[] = {
|
|||
|
||||
static const int32_t kUnknownId = -1;
|
||||
|
||||
static const PRTime kPreloadPKPinsExpirationTime = INT64_C(1514137059431000);
|
||||
static const PRTime kPreloadPKPinsExpirationTime = INT64_C(1514223123051000);
|
||||
|
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -30,7 +30,7 @@
|
|||
use app_units::{Au, MAX_AU};
|
||||
use context::LayoutContext;
|
||||
use display_list_builder::{BlockFlowDisplayListBuilding, BorderPaintingMode};
|
||||
use display_list_builder::{DisplayListBuildState, EstablishContainingBlock};
|
||||
use display_list_builder::{DisplayListBuildState, StackingContextCollectionFlags};
|
||||
use display_list_builder::StackingContextCollectionState;
|
||||
use euclid::{Point2D, Rect, SideOffsets2D, Size2D};
|
||||
use floats::{ClearType, FloatKind, Floats, PlacementInfo};
|
||||
|
@ -2152,7 +2152,7 @@ impl Flow for BlockFlow {
|
|||
}
|
||||
|
||||
fn collect_stacking_contexts(&mut self, state: &mut StackingContextCollectionState) {
|
||||
self.collect_stacking_contexts_for_block(state, EstablishContainingBlock::Yes);
|
||||
self.collect_stacking_contexts_for_block(state, StackingContextCollectionFlags::empty());
|
||||
}
|
||||
|
||||
fn build_display_list(&mut self, state: &mut DisplayListBuildState) {
|
||||
|
|
|
@ -103,11 +103,10 @@ fn convert_repeat_mode(from: RepeatKeyword) -> RepeatMode {
|
|||
}
|
||||
}
|
||||
|
||||
fn establishes_containing_block_for_absolute(can_establish_containing_block: EstablishContainingBlock,
|
||||
fn establishes_containing_block_for_absolute(flags: StackingContextCollectionFlags,
|
||||
positioning: position::T)
|
||||
-> bool {
|
||||
can_establish_containing_block == EstablishContainingBlock::Yes &&
|
||||
position::T::static_ != positioning
|
||||
!flags.contains(NEVER_CREATES_CONTAINING_BLOCK) && position::T::static_ != positioning
|
||||
}
|
||||
|
||||
trait RgbColor {
|
||||
|
@ -2346,16 +2345,19 @@ impl FragmentDisplayListBuilding for Fragment {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq)]
|
||||
pub enum EstablishContainingBlock {
|
||||
Yes,
|
||||
No,
|
||||
bitflags! {
|
||||
pub flags StackingContextCollectionFlags: u8 {
|
||||
/// This flow never establishes a containing block.
|
||||
const NEVER_CREATES_CONTAINING_BLOCK = 0x01,
|
||||
/// This flow never creates a ClipScrollNode.
|
||||
const NEVER_CREATES_CLIP_SCROLL_NODE = 0x02,
|
||||
}
|
||||
}
|
||||
|
||||
pub trait BlockFlowDisplayListBuilding {
|
||||
fn collect_stacking_contexts_for_block(&mut self,
|
||||
state: &mut StackingContextCollectionState,
|
||||
can_establish_containing_block: EstablishContainingBlock);
|
||||
flags: StackingContextCollectionFlags);
|
||||
|
||||
fn transform_clip_to_coordinate_space(&mut self,
|
||||
state: &mut StackingContextCollectionState,
|
||||
|
@ -2364,7 +2366,7 @@ pub trait BlockFlowDisplayListBuilding {
|
|||
state: &mut StackingContextCollectionState,
|
||||
preserved_state: &mut SavedStackingContextCollectionState,
|
||||
stacking_context_type: BlockStackingContextType,
|
||||
can_establish_containing_block: EstablishContainingBlock)
|
||||
flags: StackingContextCollectionFlags)
|
||||
-> ClipAndScrollInfo;
|
||||
fn setup_clip_scroll_node_for_position(&mut self,
|
||||
state: &mut StackingContextCollectionState,
|
||||
|
@ -2521,7 +2523,7 @@ impl BlockFlowDisplayListBuilding for BlockFlow {
|
|||
|
||||
fn collect_stacking_contexts_for_block(&mut self,
|
||||
state: &mut StackingContextCollectionState,
|
||||
can_establish_containing_block: EstablishContainingBlock) {
|
||||
flags: StackingContextCollectionFlags) {
|
||||
let mut preserved_state = SavedStackingContextCollectionState::new(state);
|
||||
|
||||
let block_stacking_context_type = self.block_stacking_context_type();
|
||||
|
@ -2544,10 +2546,9 @@ impl BlockFlowDisplayListBuilding for BlockFlow {
|
|||
self.setup_clipping_for_block(state,
|
||||
&mut preserved_state,
|
||||
block_stacking_context_type,
|
||||
can_establish_containing_block);
|
||||
flags);
|
||||
|
||||
if establishes_containing_block_for_absolute(can_establish_containing_block,
|
||||
self.positioning()) {
|
||||
if establishes_containing_block_for_absolute(flags, self.positioning()) {
|
||||
state.containing_block_clip_and_scroll_info = state.current_clip_and_scroll_info;
|
||||
}
|
||||
|
||||
|
@ -2574,7 +2575,7 @@ impl BlockFlowDisplayListBuilding for BlockFlow {
|
|||
state: &mut StackingContextCollectionState,
|
||||
preserved_state: &mut SavedStackingContextCollectionState,
|
||||
stacking_context_type: BlockStackingContextType,
|
||||
can_establish_containing_block: EstablishContainingBlock)
|
||||
flags: StackingContextCollectionFlags)
|
||||
-> ClipAndScrollInfo {
|
||||
// If this block is absolutely positioned, we should be clipped and positioned by
|
||||
// the scroll root of our nearest ancestor that establishes a containing block.
|
||||
|
@ -2602,14 +2603,17 @@ impl BlockFlowDisplayListBuilding for BlockFlow {
|
|||
self.transform_clip_to_coordinate_space(state, preserved_state);
|
||||
}
|
||||
|
||||
if !flags.contains(NEVER_CREATES_CLIP_SCROLL_NODE) {
|
||||
self.setup_clip_scroll_node_for_position(state, &stacking_relative_border_box);
|
||||
self.setup_clip_scroll_node_for_overflow(state, &stacking_relative_border_box);
|
||||
self.setup_clip_scroll_node_for_css_clip(state, preserved_state, &stacking_relative_border_box);
|
||||
self.setup_clip_scroll_node_for_css_clip(state, preserved_state,
|
||||
&stacking_relative_border_box);
|
||||
}
|
||||
self.base.clip = state.clip_stack.last().cloned().unwrap_or_else(max_rect);
|
||||
|
||||
// We keep track of our position so that any stickily positioned elements can
|
||||
// properly determine the extent of their movement relative to scrolling containers.
|
||||
if can_establish_containing_block == EstablishContainingBlock::Yes {
|
||||
if !flags.contains(NEVER_CREATES_CONTAINING_BLOCK) {
|
||||
let border_box = if self.fragment.establishes_stacking_context() {
|
||||
stacking_relative_border_box
|
||||
} else {
|
||||
|
@ -2678,6 +2682,9 @@ impl BlockFlowDisplayListBuilding for BlockFlow {
|
|||
|
||||
let new_clip_scroll_node_id = ClipId::new(self.fragment.unique_id(IdType::OverflowClip),
|
||||
state.pipeline_id.to_webrender());
|
||||
if state.has_clip_scroll_node(new_clip_scroll_node_id) {
|
||||
return;
|
||||
}
|
||||
let parent_id = self.clip_and_scroll_info(state.pipeline_id).scroll_node_id;
|
||||
state.add_clip_scroll_node(
|
||||
ClipScrollNode {
|
||||
|
@ -2928,7 +2935,7 @@ impl InlineFlowDisplayListBuilding for InlineFlow {
|
|||
|
||||
for fragment in self.fragments.fragments.iter_mut() {
|
||||
let previous_cb_clip_scroll_info = state.containing_block_clip_and_scroll_info;
|
||||
if establishes_containing_block_for_absolute(EstablishContainingBlock::Yes,
|
||||
if establishes_containing_block_for_absolute(StackingContextCollectionFlags::empty(),
|
||||
fragment.style.get_box().position) {
|
||||
state.containing_block_clip_and_scroll_info = state.current_clip_and_scroll_info;
|
||||
}
|
||||
|
|
|
@ -11,7 +11,7 @@ use block::{BlockFlow, CandidateBSizeIterator, ISizeAndMarginsComputer};
|
|||
use block::{ISizeConstraintInput, ISizeConstraintSolution};
|
||||
use context::LayoutContext;
|
||||
use display_list_builder::{BlockFlowDisplayListBuilding, BorderPaintingMode};
|
||||
use display_list_builder::{DisplayListBuildState, EstablishContainingBlock};
|
||||
use display_list_builder::{DisplayListBuildState, StackingContextCollectionFlags};
|
||||
use display_list_builder::StackingContextCollectionState;
|
||||
use euclid::Point2D;
|
||||
use flow;
|
||||
|
@ -500,7 +500,8 @@ impl Flow for TableFlow {
|
|||
}
|
||||
|
||||
fn collect_stacking_contexts(&mut self, state: &mut StackingContextCollectionState) {
|
||||
self.block_flow.collect_stacking_contexts_for_block(state, EstablishContainingBlock::Yes);
|
||||
self.block_flow.collect_stacking_contexts_for_block(state,
|
||||
StackingContextCollectionFlags::empty());
|
||||
}
|
||||
|
||||
fn repair_style(&mut self, new_style: &::ServoArc<ComputedValues>) {
|
||||
|
|
|
@ -10,7 +10,7 @@ use app_units::Au;
|
|||
use block::BlockFlow;
|
||||
use context::LayoutContext;
|
||||
use display_list_builder::{BlockFlowDisplayListBuilding, DisplayListBuildState};
|
||||
use display_list_builder::{EstablishContainingBlock, StackingContextCollectionState};
|
||||
use display_list_builder::{StackingContextCollectionFlags, StackingContextCollectionState};
|
||||
use euclid::Point2D;
|
||||
use flow::{Flow, FlowClass, OpaqueFlow};
|
||||
use fragment::{Fragment, FragmentBorderBoxIterator, Overflow};
|
||||
|
@ -81,7 +81,8 @@ impl Flow for TableCaptionFlow {
|
|||
}
|
||||
|
||||
fn collect_stacking_contexts(&mut self, state: &mut StackingContextCollectionState) {
|
||||
self.block_flow.collect_stacking_contexts_for_block(state, EstablishContainingBlock::No);
|
||||
self.block_flow.collect_stacking_contexts_for_block(state,
|
||||
StackingContextCollectionFlags::empty());
|
||||
}
|
||||
|
||||
fn repair_style(&mut self, new_style: &::ServoArc<ComputedValues>) {
|
||||
|
|
|
@ -10,7 +10,7 @@ use app_units::Au;
|
|||
use block::{BlockFlow, ISizeAndMarginsComputer, MarginsMayCollapseFlag};
|
||||
use context::LayoutContext;
|
||||
use display_list_builder::{BlockFlowDisplayListBuilding, BorderPaintingMode};
|
||||
use display_list_builder::{DisplayListBuildState, EstablishContainingBlock};
|
||||
use display_list_builder::{DisplayListBuildState, StackingContextCollectionFlags};
|
||||
use display_list_builder::StackingContextCollectionState;
|
||||
use euclid::{Point2D, Rect, SideOffsets2D, Size2D};
|
||||
use flow::{self, Flow, FlowClass, IS_ABSOLUTELY_POSITIONED, OpaqueFlow};
|
||||
|
@ -263,7 +263,8 @@ impl Flow for TableCellFlow {
|
|||
}
|
||||
|
||||
fn collect_stacking_contexts(&mut self, state: &mut StackingContextCollectionState) {
|
||||
self.block_flow.collect_stacking_contexts_for_block(state, EstablishContainingBlock::No);
|
||||
self.block_flow.collect_stacking_contexts_for_block(state,
|
||||
StackingContextCollectionFlags::empty());
|
||||
}
|
||||
|
||||
fn repair_style(&mut self, new_style: &::ServoArc<ComputedValues>) {
|
||||
|
|
|
@ -10,7 +10,7 @@ use app_units::Au;
|
|||
use block::{BlockFlow, ISizeAndMarginsComputer};
|
||||
use context::LayoutContext;
|
||||
use display_list_builder::{BlockFlowDisplayListBuilding, BorderPaintingMode};
|
||||
use display_list_builder::{DisplayListBuildState, EstablishContainingBlock};
|
||||
use display_list_builder::{DisplayListBuildState, StackingContextCollectionFlags};
|
||||
use display_list_builder::StackingContextCollectionState;
|
||||
use euclid::Point2D;
|
||||
use flow::{self, EarlyAbsolutePositionInfo, Flow, FlowClass, ImmutableFlowUtils, OpaqueFlow};
|
||||
|
@ -477,7 +477,8 @@ impl Flow for TableRowFlow {
|
|||
}
|
||||
|
||||
fn collect_stacking_contexts(&mut self, state: &mut StackingContextCollectionState) {
|
||||
self.block_flow.collect_stacking_contexts_for_block(state, EstablishContainingBlock::No);
|
||||
self.block_flow.collect_stacking_contexts_for_block(state,
|
||||
StackingContextCollectionFlags::empty());
|
||||
}
|
||||
|
||||
fn repair_style(&mut self, new_style: &::ServoArc<ComputedValues>) {
|
||||
|
|
|
@ -10,7 +10,7 @@ use app_units::Au;
|
|||
use block::{BlockFlow, ISizeAndMarginsComputer};
|
||||
use context::LayoutContext;
|
||||
use display_list_builder::{BlockFlowDisplayListBuilding, DisplayListBuildState};
|
||||
use display_list_builder::{EstablishContainingBlock, StackingContextCollectionState};
|
||||
use display_list_builder::{NEVER_CREATES_CONTAINING_BLOCK, StackingContextCollectionState};
|
||||
use euclid::Point2D;
|
||||
use flow::{Flow, FlowClass, OpaqueFlow};
|
||||
use fragment::{Fragment, FragmentBorderBoxIterator, Overflow};
|
||||
|
@ -180,7 +180,7 @@ impl Flow for TableRowGroupFlow {
|
|||
}
|
||||
|
||||
fn collect_stacking_contexts(&mut self, state: &mut StackingContextCollectionState) {
|
||||
self.block_flow.collect_stacking_contexts_for_block(state, EstablishContainingBlock::No);
|
||||
self.block_flow.collect_stacking_contexts_for_block(state, NEVER_CREATES_CONTAINING_BLOCK);
|
||||
}
|
||||
|
||||
fn repair_style(&mut self, new_style: &::ServoArc<ComputedValues>) {
|
||||
|
|
|
@ -18,7 +18,8 @@ use block::{AbsoluteNonReplaced, BlockFlow, FloatNonReplaced, ISizeAndMarginsCom
|
|||
use block::{ISizeConstraintSolution, MarginsMayCollapseFlag};
|
||||
use context::LayoutContext;
|
||||
use display_list_builder::{BlockFlowDisplayListBuilding, DisplayListBuildState};
|
||||
use display_list_builder::{EstablishContainingBlock, StackingContextCollectionState};
|
||||
use display_list_builder::{NEVER_CREATES_CLIP_SCROLL_NODE, NEVER_CREATES_CONTAINING_BLOCK};
|
||||
use display_list_builder::StackingContextCollectionState;
|
||||
use euclid::Point2D;
|
||||
use floats::FloatKind;
|
||||
use flow::{Flow, FlowClass, ImmutableFlowUtils, INLINE_POSITION_IS_STATIC, OpaqueFlow};
|
||||
|
@ -458,7 +459,8 @@ impl Flow for TableWrapperFlow {
|
|||
}
|
||||
|
||||
fn collect_stacking_contexts(&mut self, state: &mut StackingContextCollectionState) {
|
||||
self.block_flow.collect_stacking_contexts_for_block(state, EstablishContainingBlock::No);
|
||||
self.block_flow.collect_stacking_contexts_for_block(
|
||||
state, NEVER_CREATES_CONTAINING_BLOCK | NEVER_CREATES_CLIP_SCROLL_NODE);
|
||||
}
|
||||
|
||||
fn repair_style(&mut self, new_style: &::ServoArc<ComputedValues>) {
|
||||
|
|
|
@ -381,6 +381,16 @@ impl<T: MallocSizeOf> MallocConditionalSizeOf for Arc<T> {
|
|||
}
|
||||
}
|
||||
|
||||
impl MallocSizeOf for smallbitvec::SmallBitVec {
|
||||
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
|
||||
if let Some(ptr) = self.heap_ptr() {
|
||||
unsafe { ops.malloc_size_of(ptr) }
|
||||
} else {
|
||||
0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: MallocSizeOf, U> MallocSizeOf for TypedSize2D<T, U> {
|
||||
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
|
||||
let n = self.width.size_of(ops) + self.width.size_of(ops);
|
||||
|
@ -425,7 +435,3 @@ size_of_is_0!(Range<f32>, Range<f64>);
|
|||
|
||||
size_of_is_0!(app_units::Au);
|
||||
size_of_is_0!(cssparser::RGBA, cssparser::TokenSerializationType);
|
||||
|
||||
// XXX: once we upgrade smallbitvec to 1.0.4, use the new heap_ptr() method to
|
||||
// implement this properly
|
||||
size_of_is_0!(smallbitvec::SmallBitVec);
|
||||
|
|
|
@ -4750,7 +4750,7 @@ class CGProxyNamedOperation(CGProxySpecialOperation):
|
|||
def define(self):
|
||||
# Our first argument is the id we're getting.
|
||||
argName = self.arguments[0].identifier.name
|
||||
return ("let %s = string_jsid_to_string(cx, id);\n"
|
||||
return ("let %s = jsid_to_string(cx, id).expect(\"Not a string-convertible JSID?\");\n"
|
||||
"let this = UnwrapProxy(proxy);\n"
|
||||
"let this = &*this;\n" % argName +
|
||||
CGProxySpecialOperation.define(self))
|
||||
|
@ -4817,15 +4817,14 @@ class CGDOMJSProxyHandler_getOwnPropertyDescriptor(CGAbstractExternMethod):
|
|||
"bool", args)
|
||||
self.descriptor = descriptor
|
||||
|
||||
# https://heycam.github.io/webidl/#LegacyPlatformObjectGetOwnProperty
|
||||
def getBody(self):
|
||||
indexedGetter = self.descriptor.operations['IndexedGetter']
|
||||
indexedSetter = self.descriptor.operations['IndexedSetter']
|
||||
|
||||
get = ""
|
||||
if indexedGetter or indexedSetter:
|
||||
if indexedGetter:
|
||||
get = "let index = get_array_index_from_id(cx, id);\n"
|
||||
|
||||
if indexedGetter:
|
||||
attrs = "JSPROP_ENUMERATE"
|
||||
if self.descriptor.operations['IndexedSetter'] is None:
|
||||
attrs += " | JSPROP_READONLY"
|
||||
|
@ -4864,11 +4863,16 @@ class CGDOMJSProxyHandler_getOwnPropertyDescriptor(CGAbstractExternMethod):
|
|||
'successCode': fillDescriptor,
|
||||
'pre': 'rooted!(in(cx) let mut result_root = UndefinedValue());'
|
||||
}
|
||||
|
||||
# See the similar-looking in CGDOMJSProxyHandler_get for the spec quote.
|
||||
condition = "RUST_JSID_IS_STRING(id) || RUST_JSID_IS_INT(id)"
|
||||
if indexedGetter:
|
||||
condition = "index.is_none() && (%s)" % condition
|
||||
# Once we start supporting OverrideBuiltins we need to make
|
||||
# ResolveOwnProperty or EnumerateOwnProperties filter out named
|
||||
# properties that shadow prototype properties.
|
||||
namedGet = """
|
||||
if RUST_JSID_IS_STRING(id) {
|
||||
if %s {
|
||||
let mut has_on_proto = false;
|
||||
if !has_property_on_prototype(cx, proxy, id, &mut has_on_proto) {
|
||||
return false;
|
||||
|
@ -4877,7 +4881,7 @@ if RUST_JSID_IS_STRING(id) {
|
|||
%s
|
||||
}
|
||||
}
|
||||
""" % CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues), 8).define()
|
||||
""" % (condition, CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues), 8).define())
|
||||
else:
|
||||
namedGet = ""
|
||||
|
||||
|
@ -4935,12 +4939,12 @@ class CGDOMJSProxyHandler_defineProperty(CGAbstractExternMethod):
|
|||
if self.descriptor.hasUnforgeableMembers:
|
||||
raise TypeError("Can't handle a named setter on an interface that has "
|
||||
"unforgeables. Figure out how that should work!")
|
||||
set += ("if RUST_JSID_IS_STRING(id) {\n" +
|
||||
set += ("if RUST_JSID_IS_STRING(id) || RUST_JSID_IS_INT(id) {\n" +
|
||||
CGIndenter(CGProxyNamedSetter(self.descriptor)).define() +
|
||||
" return (*opresult).succeed();\n" +
|
||||
"}\n")
|
||||
else:
|
||||
set += ("if RUST_JSID_IS_STRING(id) {\n" +
|
||||
set += ("if RUST_JSID_IS_STRING(id) || RUST_JSID_IS_INT(id) {\n" +
|
||||
CGIndenter(CGProxyNamedGetter(self.descriptor)).define() +
|
||||
" if result.is_some() {\n"
|
||||
" return (*opresult).failNoNamedSetter();\n"
|
||||
|
@ -5093,9 +5097,12 @@ class CGDOMJSProxyHandler_hasOwn(CGAbstractExternMethod):
|
|||
indexed = ""
|
||||
|
||||
namedGetter = self.descriptor.operations['NamedGetter']
|
||||
condition = "RUST_JSID_IS_STRING(id) || RUST_JSID_IS_INT(id)"
|
||||
if indexedGetter:
|
||||
condition = "index.is_none() && (%s)" % condition
|
||||
if namedGetter:
|
||||
named = """\
|
||||
if RUST_JSID_IS_STRING(id) {
|
||||
if %s {
|
||||
let mut has_on_proto = false;
|
||||
if !has_property_on_prototype(cx, proxy, id, &mut has_on_proto) {
|
||||
return false;
|
||||
|
@ -5107,7 +5114,7 @@ if RUST_JSID_IS_STRING(id) {
|
|||
}
|
||||
}
|
||||
|
||||
""" % CGIndenter(CGProxyNamedGetter(self.descriptor), 8).define()
|
||||
""" % (condition, CGIndenter(CGProxyNamedGetter(self.descriptor), 8).define())
|
||||
else:
|
||||
named = ""
|
||||
|
||||
|
@ -5136,6 +5143,7 @@ class CGDOMJSProxyHandler_get(CGAbstractExternMethod):
|
|||
CGAbstractExternMethod.__init__(self, descriptor, "get", "bool", args)
|
||||
self.descriptor = descriptor
|
||||
|
||||
# https://heycam.github.io/webidl/#LegacyPlatformObjectGetOwnProperty
|
||||
def getBody(self):
|
||||
getFromExpando = """\
|
||||
rooted!(in(cx) let mut expando = ptr::null_mut());
|
||||
|
@ -5175,9 +5183,16 @@ if !expando.is_null() {
|
|||
|
||||
namedGetter = self.descriptor.operations['NamedGetter']
|
||||
if namedGetter:
|
||||
getNamed = ("if RUST_JSID_IS_STRING(id) {\n" +
|
||||
condition = "RUST_JSID_IS_STRING(id) || RUST_JSID_IS_INT(id)"
|
||||
# From step 1:
|
||||
# If O supports indexed properties and P is an array index, then:
|
||||
#
|
||||
# 3. Set ignoreNamedProps to true.
|
||||
if indexedGetter:
|
||||
condition = "index.is_none() && (%s)" % condition
|
||||
getNamed = ("if %s {\n" +
|
||||
CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues)).define() +
|
||||
"}\n")
|
||||
"}\n") % condition
|
||||
else:
|
||||
getNamed = ""
|
||||
|
||||
|
@ -5633,6 +5648,7 @@ def generate_imports(config, cgthings, descriptors, callbacks=None, dictionaries
|
|||
'js::glue::GetProxyPrivate',
|
||||
'js::glue::NewProxyObject',
|
||||
'js::glue::ProxyTraps',
|
||||
'js::glue::RUST_JSID_IS_INT',
|
||||
'js::glue::RUST_JSID_IS_STRING',
|
||||
'js::glue::RUST_SYMBOL_TO_JSID',
|
||||
'js::glue::int_to_jsid',
|
||||
|
@ -5719,7 +5735,7 @@ def generate_imports(config, cgthings, descriptors, callbacks=None, dictionaries
|
|||
'dom::bindings::conversions::root_from_handleobject',
|
||||
'dom::bindings::conversions::root_from_handlevalue',
|
||||
'dom::bindings::conversions::root_from_object',
|
||||
'dom::bindings::conversions::string_jsid_to_string',
|
||||
'dom::bindings::conversions::jsid_to_string',
|
||||
'dom::bindings::codegen::PrototypeList',
|
||||
'dom::bindings::codegen::RegisterBindings',
|
||||
'dom::bindings::codegen::UnionTypes',
|
||||
|
|
|
@ -132,20 +132,6 @@ impl <T: FromJSValConvertible + JSTraceable> FromJSValConvertible for RootedTrac
|
|||
}
|
||||
}
|
||||
|
||||
/// Convert `id` to a `DOMString`, assuming it is string-valued.
|
||||
///
|
||||
/// Handling of invalid UTF-16 in strings depends on the relevant option.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if `id` is not string-valued.
|
||||
pub fn string_jsid_to_string(cx: *mut JSContext, id: HandleId) -> DOMString {
|
||||
unsafe {
|
||||
assert!(RUST_JSID_IS_STRING(id));
|
||||
jsstring_to_str(cx, RUST_JSID_TO_STRING(id))
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert `id` to a `DOMString`. Returns `None` if `id` is not a string or
|
||||
/// integer.
|
||||
///
|
||||
|
|
|
@ -1004,6 +1004,10 @@ extern "C" {
|
|||
aURIString: ServoRawOffsetArc<RustString>)
|
||||
-> *mut ImageValue;
|
||||
}
|
||||
extern "C" {
|
||||
pub fn Gecko_ImageValue_SizeOfIncludingThis(aImageValue: *mut ImageValue)
|
||||
-> usize;
|
||||
}
|
||||
extern "C" {
|
||||
pub fn Gecko_SetLayerImageImageValue(image: *mut nsStyleImage,
|
||||
aImageValue: *mut ImageValue);
|
||||
|
|
|
@ -9,6 +9,7 @@ use gecko_bindings::structs::mozilla::css::URLValueData;
|
|||
use gecko_bindings::structs::root::{nsStyleImageRequest, RustString};
|
||||
use gecko_bindings::structs::root::mozilla::css::ImageValue;
|
||||
use gecko_bindings::sugar::refptr::RefPtr;
|
||||
use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};
|
||||
use parser::ParserContext;
|
||||
use servo_arc::{Arc, RawOffsetArc};
|
||||
use std::fmt;
|
||||
|
@ -16,22 +17,19 @@ use std::mem;
|
|||
use style_traits::{ToCss, ParseError};
|
||||
|
||||
/// A specified url() value for gecko. Gecko does not eagerly resolve SpecifiedUrls.
|
||||
#[derive(Clone, Debug, MallocSizeOf, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub struct SpecifiedUrl {
|
||||
/// The URL in unresolved string form.
|
||||
///
|
||||
/// Refcounted since cloning this should be cheap and data: uris can be
|
||||
/// really large.
|
||||
#[ignore_malloc_size_of = "XXX: do this once bug 1397971 lands"]
|
||||
serialization: Arc<String>,
|
||||
|
||||
/// The URL extra data.
|
||||
#[ignore_malloc_size_of = "RefPtr is tricky, and there aren't many of these in practise"]
|
||||
pub extra_data: RefPtr<URLExtraData>,
|
||||
|
||||
/// Cache ImageValue, if any, so that we can reuse it while rematching a
|
||||
/// a property with this specified url value.
|
||||
#[ignore_malloc_size_of = "XXX: do this once bug 1397971 lands"]
|
||||
pub image_value: Option<RefPtr<ImageValue>>,
|
||||
}
|
||||
trivial_to_computed_value!(SpecifiedUrl);
|
||||
|
@ -144,3 +142,25 @@ impl ToCss for SpecifiedUrl {
|
|||
dest.write_str(")")
|
||||
}
|
||||
}
|
||||
|
||||
impl MallocSizeOf for SpecifiedUrl {
|
||||
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
|
||||
use gecko_bindings::bindings::Gecko_ImageValue_SizeOfIncludingThis;
|
||||
|
||||
let mut n = 0;
|
||||
|
||||
// XXX: measure `serialization` once bug 1397971 lands
|
||||
|
||||
// We ignore `extra_data`, because RefPtr is tricky, and there aren't
|
||||
// many of them in practise (sharing is common).
|
||||
|
||||
if let Some(ref image_value) = self.image_value {
|
||||
// Although this is a RefPtr, this is the primary reference because
|
||||
// SpecifiedUrl is responsible for creating the image_value. So we
|
||||
// measure unconditionally here.
|
||||
n += unsafe { Gecko_ImageValue_SizeOfIncludingThis(image_value.clone().get()) };
|
||||
}
|
||||
|
||||
n
|
||||
}
|
||||
}
|
||||
|
|
|
@ -40,6 +40,30 @@ pub type Filter = GenericFilter<Angle, Factor, NonNegativeLength, Impossible>;
|
|||
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
|
||||
pub struct Factor(NumberOrPercentage);
|
||||
|
||||
impl Factor {
|
||||
/// Parse this factor but clamp to one if the value is over 100%.
|
||||
#[inline]
|
||||
pub fn parse_with_clamping_to_one<'i, 't>(
|
||||
context: &ParserContext,
|
||||
input: &mut Parser<'i, 't>
|
||||
) -> Result<Self, ParseError<'i>> {
|
||||
Factor::parse(context, input).map(|v| v.clamp_to_one())
|
||||
}
|
||||
|
||||
/// Clamp the value to 1 if the value is over 100%.
|
||||
#[inline]
|
||||
fn clamp_to_one(self) -> Self {
|
||||
match self.0 {
|
||||
NumberOrPercentage::Percentage(percent) => {
|
||||
Factor(NumberOrPercentage::Percentage(percent.clamp_to_hundred()))
|
||||
},
|
||||
NumberOrPercentage::Number(number) => {
|
||||
Factor(NumberOrPercentage::Number(number.clamp_to_one()))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for Factor {
|
||||
#[inline]
|
||||
fn parse<'i, 't>(
|
||||
|
@ -173,12 +197,28 @@ impl Parse for Filter {
|
|||
"blur" => Ok(GenericFilter::Blur((Length::parse_non_negative(context, i)?).into())),
|
||||
"brightness" => Ok(GenericFilter::Brightness(Factor::parse(context, i)?)),
|
||||
"contrast" => Ok(GenericFilter::Contrast(Factor::parse(context, i)?)),
|
||||
"grayscale" => Ok(GenericFilter::Grayscale(Factor::parse(context, i)?)),
|
||||
"grayscale" => {
|
||||
// Values of amount over 100% are allowed but UAs must clamp the values to 1.
|
||||
// https://drafts.fxtf.org/filter-effects/#funcdef-filter-grayscale
|
||||
Ok(GenericFilter::Grayscale(Factor::parse_with_clamping_to_one(context, i)?))
|
||||
},
|
||||
"hue-rotate" => Ok(GenericFilter::HueRotate(Angle::parse(context, i)?)),
|
||||
"invert" => Ok(GenericFilter::Invert(Factor::parse(context, i)?)),
|
||||
"opacity" => Ok(GenericFilter::Opacity(Factor::parse(context, i)?)),
|
||||
"invert" => {
|
||||
// Values of amount over 100% are allowed but UAs must clamp the values to 1.
|
||||
// https://drafts.fxtf.org/filter-effects/#funcdef-filter-invert
|
||||
Ok(GenericFilter::Invert(Factor::parse_with_clamping_to_one(context, i)?))
|
||||
},
|
||||
"opacity" => {
|
||||
// Values of amount over 100% are allowed but UAs must clamp the values to 1.
|
||||
// https://drafts.fxtf.org/filter-effects/#funcdef-filter-opacity
|
||||
Ok(GenericFilter::Opacity(Factor::parse_with_clamping_to_one(context, i)?))
|
||||
},
|
||||
"saturate" => Ok(GenericFilter::Saturate(Factor::parse(context, i)?)),
|
||||
"sepia" => Ok(GenericFilter::Sepia(Factor::parse(context, i)?)),
|
||||
"sepia" => {
|
||||
// Values of amount over 100% are allowed but UAs must clamp the values to 1.
|
||||
// https://drafts.fxtf.org/filter-effects/#funcdef-filter-sepia
|
||||
Ok(GenericFilter::Sepia(Factor::parse_with_clamping_to_one(context, i)?))
|
||||
},
|
||||
"drop-shadow" => Ok(GenericFilter::DropShadow(Parse::parse(context, i)?)),
|
||||
_ => Err(ValueParseError::InvalidFilter(Token::Function(function.clone())).into()),
|
||||
}
|
||||
|
|
|
@ -217,6 +217,15 @@ impl Number {
|
|||
-> Result<Number, ParseError<'i>> {
|
||||
parse_number_with_clamping_mode(context, input, AllowedNumericType::AtLeastOne)
|
||||
}
|
||||
|
||||
/// Clamp to 1.0 if the value is over 1.0.
|
||||
#[inline]
|
||||
pub fn clamp_to_one(self) -> Self {
|
||||
Number {
|
||||
value: self.value.min(1.),
|
||||
calc_clamping_mode: self.calc_clamping_mode,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToComputedValue for Number {
|
||||
|
|
|
@ -125,6 +125,15 @@ impl Percentage {
|
|||
) -> Result<Self, ParseError<'i>> {
|
||||
Self::parse_with_clamping_mode(context, input, AllowedNumericType::NonNegative)
|
||||
}
|
||||
|
||||
/// Clamp to 100% if the value is over 100%.
|
||||
#[inline]
|
||||
pub fn clamp_to_hundred(self) -> Self {
|
||||
Percentage {
|
||||
value: self.value.min(1.),
|
||||
calc_clamping_mode: self.calc_clamping_mode,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for Percentage {
|
||||
|
|
|
@ -12,6 +12,7 @@ transforms:
|
|||
|
||||
kind-dependencies:
|
||||
- repackage-signing
|
||||
- partials-signing
|
||||
|
||||
only-for-build-platforms:
|
||||
- linux-nightly/opt
|
||||
|
|
|
@ -26,3 +26,5 @@ jobs:
|
|||
symbol: I(agb)
|
||||
index-task:
|
||||
symbol: I(idx)
|
||||
funsize-update-generator:
|
||||
symbol: I(pg)
|
||||
|
|
|
@ -0,0 +1,13 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
loader: taskgraph.loader.single_dep:loader
|
||||
|
||||
transforms:
|
||||
- taskgraph.transforms.name_sanity:transforms
|
||||
- taskgraph.transforms.partials_signing:transforms
|
||||
- taskgraph.transforms.task:transforms
|
||||
|
||||
kind-dependencies:
|
||||
- partials
|
|
@ -0,0 +1,23 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
loader: taskgraph.loader.single_dep:loader
|
||||
|
||||
transforms:
|
||||
- taskgraph.transforms.name_sanity:transforms
|
||||
- taskgraph.transforms.partials:transforms
|
||||
- taskgraph.transforms.task:transforms
|
||||
|
||||
kind-dependencies:
|
||||
- repackage-signing
|
||||
|
||||
only-for-attributes:
|
||||
- nightly
|
||||
|
||||
only-for-build-platforms:
|
||||
- macosx64-nightly/opt
|
||||
- win32-nightly/opt
|
||||
- win64-nightly/opt
|
||||
- linux-nightly/opt
|
||||
- linux64-nightly/opt
|
|
@ -25,6 +25,7 @@ RUN pip install -r /tmp/requirements.txt
|
|||
# scripts
|
||||
RUN mkdir /home/worker/bin
|
||||
COPY scripts/* /home/worker/bin/
|
||||
|
||||
COPY runme.sh /runme.sh
|
||||
COPY recompress.sh /recompress.sh
|
||||
RUN chmod 755 /home/worker/bin/* /*.sh
|
||||
|
@ -35,3 +36,5 @@ ENV HOME /home/worker
|
|||
ENV SHELL /bin/bash
|
||||
ENV USER worker
|
||||
ENV LOGNAME worker
|
||||
|
||||
CMD ["/runme.sh"]
|
||||
|
|
|
@ -1,5 +1,9 @@
|
|||
#!/usr/bin/env python
|
||||
from __future__ import absolute_import, print_function
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import ConfigParser
|
||||
import argparse
|
||||
|
@ -27,6 +31,7 @@ ALLOWED_URL_PREFIXES = [
|
|||
"http://ftp.mozilla.org/",
|
||||
"http://download.mozilla.org/",
|
||||
"https://archive.mozilla.org/",
|
||||
"https://queue.taskcluster.net/v1/task/",
|
||||
]
|
||||
|
||||
DEFAULT_FILENAME_TEMPLATE = "{appName}-{branch}-{version}-{platform}-" \
|
||||
|
@ -286,6 +291,10 @@ def main():
|
|||
# if branch not set explicitly use repo-name
|
||||
mar_data["branch"] = e.get("branch",
|
||||
mar_data["repo"].rstrip("/").split("/")[-1])
|
||||
if 'dest_mar' in e:
|
||||
mar_name = e['dest_mar']
|
||||
else:
|
||||
# default to formatted name if not specified
|
||||
mar_name = args.filename_template.format(**mar_data)
|
||||
mar_data["mar"] = mar_name
|
||||
dest_mar = os.path.join(work_env.workdir, mar_name)
|
||||
|
|
|
@ -233,3 +233,13 @@ repackage-signing
|
|||
-----------------
|
||||
Repackage-signing take the repackaged installers (windows) and update packaging (with
|
||||
the signed internal bits) and signs them.
|
||||
|
||||
partials
|
||||
--------
|
||||
Partials takes the complete.mar files produced in previous tasks and generates partial
|
||||
updates between previous nightly releases and the new one. Requires a release_history
|
||||
in the parameters. See ``mach release-history`` if doing this manually.
|
||||
|
||||
partials-signing
|
||||
----------------
|
||||
Partials-signing takes the partial updates produced in Partials and signs them.
|
||||
|
|
|
@ -107,6 +107,12 @@ syntax or reading a project-specific configuration file).
|
|||
``include_nightly``
|
||||
If true, then nightly tasks are eligible for optimization.
|
||||
|
||||
``release_history``
|
||||
History of recent releases by platform and locale, used when generating
|
||||
partial updates for nightly releases.
|
||||
Suitable contents can be generated with ``mach release-history``,
|
||||
which will print to the console by default.
|
||||
|
||||
Morphed Set
|
||||
-----------
|
||||
|
||||
|
|
|
@ -504,3 +504,23 @@ class TaskClusterImagesProvider(object):
|
|||
except Exception:
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
@CommandProvider
|
||||
class TaskClusterPartialsData(object):
|
||||
@Command('release-history', category="ci",
|
||||
description="Query balrog for release history used by enable partials generation")
|
||||
@CommandArgument('-b', '--branch',
|
||||
help="The gecko project branch used in balrog, such as "
|
||||
"mozilla-central, release, date")
|
||||
@CommandArgument('--product', default='Firefox',
|
||||
help="The product identifier, such as 'Firefox'")
|
||||
def generate_partials_builds(self, product, branch):
|
||||
from taskgraph.util.partials import populate_release_history
|
||||
try:
|
||||
import yaml
|
||||
release_history = {'release_history': populate_release_history(product, branch)}
|
||||
print(yaml.safe_dump(release_history, allow_unicode=True, default_flow_style=False))
|
||||
except Exception:
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||
|
|
|
@ -18,6 +18,7 @@ from .create import create_tasks
|
|||
from .parameters import Parameters
|
||||
from .taskgraph import TaskGraph
|
||||
from .actions import render_actions_json
|
||||
from taskgraph.util.partials import populate_release_history
|
||||
from . import GECKO
|
||||
|
||||
from taskgraph.util.templates import Templates
|
||||
|
@ -107,6 +108,7 @@ def taskgraph_decision(options):
|
|||
"""
|
||||
|
||||
parameters = get_decision_parameters(options)
|
||||
|
||||
# create a TaskGraphGenerator instance
|
||||
tgg = TaskGraphGenerator(
|
||||
root_dir=options['root'],
|
||||
|
@ -202,6 +204,13 @@ def get_decision_parameters(options):
|
|||
if options.get('target_tasks_method'):
|
||||
parameters['target_tasks_method'] = options['target_tasks_method']
|
||||
|
||||
# If the target method is nightly, we should build partials. This means
|
||||
# knowing what has been released previously.
|
||||
# An empty release_history is fine, it just means no partials will be built
|
||||
parameters.setdefault('release_history', dict())
|
||||
if 'nightly' in parameters.get('target_tasks_method', ''):
|
||||
parameters['release_history'] = populate_release_history('Firefox', project)
|
||||
|
||||
return Parameters(parameters)
|
||||
|
||||
|
||||
|
|
|
@ -28,6 +28,7 @@ PARAMETER_NAMES = set([
|
|||
'project',
|
||||
'pushdate',
|
||||
'pushlog_id',
|
||||
'release_history',
|
||||
'target_task_labels',
|
||||
'target_tasks_method',
|
||||
])
|
||||
|
|
|
@ -56,7 +56,7 @@ def make_task_description(config, jobs):
|
|||
dep_job = job['dependent-task']
|
||||
|
||||
treeherder = job.get('treeherder', {})
|
||||
treeherder.setdefault('symbol', 'tc-Up(N)')
|
||||
treeherder.setdefault('symbol', 'c-Up(N)')
|
||||
dep_th_platform = dep_job.task.get('extra', {}).get(
|
||||
'treeherder', {}).get('machine', {}).get('platform', '')
|
||||
treeherder.setdefault('platform',
|
||||
|
@ -66,11 +66,14 @@ def make_task_description(config, jobs):
|
|||
|
||||
attributes = copy_attributes_from_dependent_job(dep_job)
|
||||
|
||||
treeherder_job_symbol = dep_job.attributes.get('locale', 'N')
|
||||
|
||||
if dep_job.attributes.get('locale'):
|
||||
treeherder['symbol'] = 'tc-Up({})'.format(dep_job.attributes.get('locale'))
|
||||
treeherder['symbol'] = 'c-Up({})'.format(treeherder_job_symbol)
|
||||
attributes['locale'] = dep_job.attributes.get('locale')
|
||||
|
||||
label = job['label']
|
||||
|
||||
description = (
|
||||
"Balrog submission for locale '{locale}' for build '"
|
||||
"{build_platform}/{build_type}'".format(
|
||||
|
@ -94,7 +97,6 @@ def make_task_description(config, jobs):
|
|||
task = {
|
||||
'label': label,
|
||||
'description': description,
|
||||
# do we have to define worker type somewhere?
|
||||
'worker-type': 'scriptworker-prov-v1/balrogworker-v1',
|
||||
'worker': {
|
||||
'implementation': 'balrog',
|
||||
|
|
|
@ -9,6 +9,9 @@ from __future__ import absolute_import, print_function, unicode_literals
|
|||
|
||||
from taskgraph.transforms.base import TransformSequence
|
||||
from taskgraph.util.attributes import copy_attributes_from_dependent_job
|
||||
from taskgraph.util.partials import (get_balrog_platform_name,
|
||||
get_partials_artifacts,
|
||||
get_partials_artifact_map)
|
||||
from taskgraph.util.schema import validate_schema, Schema
|
||||
from taskgraph.util.scriptworker import (get_beetmover_bucket_scope,
|
||||
get_beetmover_action_scope)
|
||||
|
@ -215,6 +218,14 @@ def make_task_description(config, jobs):
|
|||
}
|
||||
dependencies.update(repackage_dependencies)
|
||||
|
||||
# If this isn't a direct dependency, it won't be in there.
|
||||
if 'repackage-signing' not in dependencies:
|
||||
repackage_signing_name = "repackage-signing"
|
||||
repackage_signing_deps = {"repackage-signing":
|
||||
dep_job.dependencies[repackage_signing_name]
|
||||
}
|
||||
dependencies.update(repackage_signing_deps)
|
||||
|
||||
attributes = copy_attributes_from_dependent_job(dep_job)
|
||||
if job.get('locale'):
|
||||
attributes['locale'] = job['locale']
|
||||
|
@ -273,7 +284,6 @@ def generate_upstream_artifacts(build_task_ref, build_signing_task_ref,
|
|||
_check_platform_matched_only_one_regex(
|
||||
tasktype, platform, plarform_was_previously_matched_by_regex, platform_regex
|
||||
)
|
||||
|
||||
upstream_artifacts.append({
|
||||
"taskId": {"task-reference": ref},
|
||||
"taskType": tasktype,
|
||||
|
@ -285,6 +295,23 @@ def generate_upstream_artifacts(build_task_ref, build_signing_task_ref,
|
|||
return upstream_artifacts
|
||||
|
||||
|
||||
def generate_partials_upstream_artifacts(artifacts, platform, locale=None):
|
||||
if not locale or locale == 'en-US':
|
||||
artifact_prefix = 'public/build'
|
||||
else:
|
||||
artifact_prefix = 'public/build/{}'.format(locale)
|
||||
|
||||
upstream_artifacts = [{
|
||||
'taskId': {'task-reference': '<partials-signing>'},
|
||||
'taskType': 'signing',
|
||||
'paths': ["{}/{}".format(artifact_prefix, p)
|
||||
for p in artifacts],
|
||||
'locale': locale or 'en-US',
|
||||
}]
|
||||
|
||||
return upstream_artifacts
|
||||
|
||||
|
||||
def _check_platform_matched_only_one_regex(
|
||||
task_type, platform, plarform_was_previously_matched_by_regex, platform_regex
|
||||
):
|
||||
|
@ -299,8 +326,12 @@ least 2 regular expressions. First matched: "{first_matched}". Second matched: \
|
|||
|
||||
|
||||
def is_valid_beetmover_job(job):
|
||||
# windows builds don't have docker-image, so fewer dependencies
|
||||
if any(b in job['attributes']['build_platform'] for b in _WINDOWS_BUILD_PLATFORMS):
|
||||
# beetmover after partials-signing should have six dependencies.
|
||||
# windows builds w/o partials don't have docker-image, so fewer
|
||||
# dependencies
|
||||
if 'partials-signing' in job['dependencies'].keys():
|
||||
expected_dep_count = 6
|
||||
elif any(b in job['attributes']['build_platform'] for b in _WINDOWS_BUILD_PLATFORMS):
|
||||
expected_dep_count = 4
|
||||
else:
|
||||
expected_dep_count = 5
|
||||
|
@ -321,6 +352,7 @@ def make_task_worker(config, jobs):
|
|||
build_signing_task = None
|
||||
repackage_task = None
|
||||
repackage_signing_task = None
|
||||
|
||||
for dependency in job["dependencies"].keys():
|
||||
if 'repackage-signing' in dependency:
|
||||
repackage_signing_task = dependency
|
||||
|
@ -348,3 +380,57 @@ def make_task_worker(config, jobs):
|
|||
job["worker"] = worker
|
||||
|
||||
yield job
|
||||
|
||||
|
||||
@transforms.add
|
||||
def make_partials_artifacts(config, jobs):
|
||||
for job in jobs:
|
||||
locale = job["attributes"].get("locale")
|
||||
if not locale:
|
||||
locale = 'en-US'
|
||||
|
||||
# Remove when proved reliable
|
||||
# job['treeherder']['tier'] = 3
|
||||
|
||||
platform = job["attributes"]["build_platform"]
|
||||
|
||||
balrog_platform = get_balrog_platform_name(platform)
|
||||
|
||||
artifacts = get_partials_artifacts(config.params.get('release_history'),
|
||||
balrog_platform, locale)
|
||||
|
||||
# Dependency: | repackage-signing | partials-signing
|
||||
# Partials artifacts | Skip | Populate & yield
|
||||
# No partials | Yield | continue
|
||||
if len(artifacts) == 0:
|
||||
if 'partials-signing' in job['dependencies']:
|
||||
continue
|
||||
else:
|
||||
yield job
|
||||
continue
|
||||
else:
|
||||
if 'partials-signing' not in job['dependencies']:
|
||||
continue
|
||||
|
||||
upstream_artifacts = generate_partials_upstream_artifacts(
|
||||
artifacts, balrog_platform, locale
|
||||
)
|
||||
|
||||
job['worker']['upstream-artifacts'].extend(upstream_artifacts)
|
||||
|
||||
extra = list()
|
||||
|
||||
artifact_map = get_partials_artifact_map(
|
||||
config.params.get('release_history'), balrog_platform, locale)
|
||||
for artifact in artifact_map:
|
||||
extra.append({
|
||||
'locale': locale,
|
||||
'artifact_name': artifact,
|
||||
'buildid': artifact_map[artifact],
|
||||
'platform': balrog_platform,
|
||||
})
|
||||
|
||||
job.setdefault('extra', {})
|
||||
job['extra']['partials'] = extra
|
||||
|
||||
yield job
|
||||
|
|
|
@ -0,0 +1,137 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
"""
|
||||
Transform the partials task into an actual task description.
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
from taskgraph.transforms.base import TransformSequence
|
||||
from taskgraph.util.attributes import copy_attributes_from_dependent_job
|
||||
from taskgraph.util.partials import get_balrog_platform_name, get_builds
|
||||
from taskgraph.util.taskcluster import get_taskcluster_artifact_prefix
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
transforms = TransformSequence()
|
||||
|
||||
|
||||
def _generate_task_output_files(filenames, locale=None):
|
||||
locale_output_path = '{}/'.format(locale) if locale else ''
|
||||
|
||||
data = list()
|
||||
for filename in filenames:
|
||||
data.append({
|
||||
'type': 'file',
|
||||
'path': '/home/worker/artifacts/{}'.format(filename),
|
||||
'name': 'public/build/{}{}'.format(locale_output_path, filename)
|
||||
})
|
||||
data.append({
|
||||
'type': 'file',
|
||||
'path': '/home/worker/artifacts/manifest.json',
|
||||
'name': 'public/build/{}manifest.json'.format(locale_output_path)
|
||||
})
|
||||
return data
|
||||
|
||||
|
||||
@transforms.add
|
||||
def make_task_description(config, jobs):
|
||||
# If no balrog release history, then don't generate partials
|
||||
if not config.params.get('release_history'):
|
||||
return
|
||||
for job in jobs:
|
||||
dep_job = job['dependent-task']
|
||||
|
||||
treeherder = job.get('treeherder', {})
|
||||
treeherder.setdefault('symbol', 'p(N)')
|
||||
|
||||
label = job.get('label', "partials-{}".format(dep_job.label))
|
||||
dep_th_platform = dep_job.task.get('extra', {}).get(
|
||||
'treeherder', {}).get('machine', {}).get('platform', '')
|
||||
|
||||
treeherder.setdefault('platform',
|
||||
"{}/opt".format(dep_th_platform))
|
||||
treeherder.setdefault('kind', 'build')
|
||||
treeherder.setdefault('tier', 1)
|
||||
|
||||
dependent_kind = str(dep_job.kind)
|
||||
dependencies = {dependent_kind: dep_job.label}
|
||||
signing_dependencies = dep_job.dependencies
|
||||
# This is so we get the build task etc in our dependencies to
|
||||
# have better beetmover support.
|
||||
dependencies.update(signing_dependencies)
|
||||
|
||||
attributes = copy_attributes_from_dependent_job(dep_job)
|
||||
locale = dep_job.attributes.get('locale')
|
||||
if locale:
|
||||
attributes['locale'] = locale
|
||||
treeherder['symbol'] = "p({})".format(locale)
|
||||
|
||||
build_locale = locale or 'en-US'
|
||||
|
||||
builds = get_builds(config.params['release_history'], dep_th_platform,
|
||||
build_locale)
|
||||
|
||||
# If the list is empty there's no available history for this platform
|
||||
# and locale combination, so we can't build any partials.
|
||||
if not builds:
|
||||
continue
|
||||
|
||||
signing_task = None
|
||||
for dependency in sorted(dependencies.keys()):
|
||||
if 'repackage-signing' in dependency:
|
||||
signing_task = dependency
|
||||
break
|
||||
signing_task_ref = '<{}>'.format(signing_task)
|
||||
|
||||
extra = {'funsize': {'partials': list()}}
|
||||
update_number = 1
|
||||
artifact_path = "{}{}".format(
|
||||
get_taskcluster_artifact_prefix(signing_task_ref, locale=locale),
|
||||
'target.complete.mar'
|
||||
)
|
||||
for build in builds:
|
||||
extra['funsize']['partials'].append({
|
||||
'locale': build_locale,
|
||||
'from_mar': builds[build]['mar_url'],
|
||||
'to_mar': {'task-reference': artifact_path},
|
||||
'platform': get_balrog_platform_name(dep_th_platform),
|
||||
'branch': config.params['project'],
|
||||
'update_number': update_number,
|
||||
'dest_mar': build,
|
||||
})
|
||||
update_number += 1
|
||||
|
||||
cot = extra.setdefault('chainOfTrust', {})
|
||||
cot.setdefault('inputs', {})['docker-image'] = {"task-reference": "<docker-image>"}
|
||||
|
||||
worker = {
|
||||
'artifacts': _generate_task_output_files(builds.keys(), locale),
|
||||
'implementation': 'docker-worker',
|
||||
'docker-image': {'in-tree': 'funsize-update-generator'},
|
||||
'os': 'linux',
|
||||
'max-run-time': 3600,
|
||||
'chain-of-trust': True,
|
||||
'env': {
|
||||
'SHA1_SIGNING_CERT': 'nightly_sha1',
|
||||
'SHA384_SIGNING_CERT': 'nightly_sha384'
|
||||
}
|
||||
}
|
||||
|
||||
level = config.params['level']
|
||||
|
||||
task = {
|
||||
'label': label,
|
||||
'description': "{} Partials".format(
|
||||
dep_job.task["metadata"]["description"]),
|
||||
'worker-type': 'aws-provisioner-v1/gecko-%s-b-linux' % level,
|
||||
'dependencies': dependencies,
|
||||
'attributes': attributes,
|
||||
'run-on-projects': dep_job.attributes.get('run_on_projects'),
|
||||
'treeherder': treeherder,
|
||||
'extra': extra,
|
||||
'worker': worker,
|
||||
}
|
||||
|
||||
yield task
|
|
@ -0,0 +1,96 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
"""
|
||||
Transform the partials task into an actual task description.
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
from taskgraph.transforms.base import TransformSequence
|
||||
from taskgraph.util.attributes import copy_attributes_from_dependent_job
|
||||
from taskgraph.util.scriptworker import get_signing_cert_scope_per_platform
|
||||
from taskgraph.util.partials import get_balrog_platform_name, get_partials_artifacts
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
transforms = TransformSequence()
|
||||
|
||||
|
||||
def generate_upstream_artifacts(release_history, platform, locale=None):
|
||||
artifact_prefix = 'public/build'
|
||||
if locale:
|
||||
artifact_prefix = 'public/build/{}'.format(locale)
|
||||
else:
|
||||
locale = 'en-US'
|
||||
|
||||
artifacts = get_partials_artifacts(release_history, platform, locale)
|
||||
|
||||
upstream_artifacts = [{
|
||||
"taskId": {"task-reference": '<partials>'},
|
||||
"taskType": 'partials',
|
||||
"paths": ["{}/{}".format(artifact_prefix, p)
|
||||
for p in artifacts],
|
||||
"formats": ["mar_sha384"],
|
||||
}]
|
||||
|
||||
return upstream_artifacts
|
||||
|
||||
|
||||
@transforms.add
|
||||
def make_task_description(config, jobs):
|
||||
for job in jobs:
|
||||
dep_job = job['dependent-task']
|
||||
|
||||
treeherder = job.get('treeherder', {})
|
||||
treeherder.setdefault('symbol', 'ps(N)')
|
||||
|
||||
dep_th_platform = dep_job.task.get('extra', {}).get(
|
||||
'treeherder', {}).get('machine', {}).get('platform', '')
|
||||
label = job.get('label', "partials-signing-{}".format(dep_job.label))
|
||||
dep_th_platform = dep_job.task.get('extra', {}).get(
|
||||
'treeherder', {}).get('machine', {}).get('platform', '')
|
||||
treeherder.setdefault('platform',
|
||||
"{}/opt".format(dep_th_platform))
|
||||
treeherder.setdefault('kind', 'build')
|
||||
treeherder.setdefault('tier', 1)
|
||||
|
||||
dependent_kind = str(dep_job.kind)
|
||||
dependencies = {dependent_kind: dep_job.label}
|
||||
signing_dependencies = dep_job.dependencies
|
||||
# This is so we get the build task etc in our dependencies to
|
||||
# have better beetmover support.
|
||||
dependencies.update(signing_dependencies)
|
||||
|
||||
attributes = copy_attributes_from_dependent_job(dep_job)
|
||||
locale = dep_job.attributes.get('locale')
|
||||
if locale:
|
||||
attributes['locale'] = locale
|
||||
treeherder['symbol'] = 'ps({})'.format(locale)
|
||||
|
||||
balrog_platform = get_balrog_platform_name(dep_th_platform)
|
||||
upstream_artifacts = generate_upstream_artifacts(
|
||||
config.params['release_history'], balrog_platform, locale)
|
||||
|
||||
build_platform = dep_job.attributes.get('build_platform')
|
||||
is_nightly = dep_job.attributes.get('nightly')
|
||||
signing_cert_scope = get_signing_cert_scope_per_platform(
|
||||
build_platform, is_nightly, config
|
||||
)
|
||||
scopes = [signing_cert_scope, 'project:releng:signing:format:mar_sha384']
|
||||
task = {
|
||||
'label': label,
|
||||
'description': "{} Partials".format(
|
||||
dep_job.task["metadata"]["description"]),
|
||||
'worker-type': 'scriptworker-prov-v1/signing-linux-v1',
|
||||
'worker': {'implementation': 'scriptworker-signing',
|
||||
'upstream-artifacts': upstream_artifacts,
|
||||
'max-run-time': 3600},
|
||||
'dependencies': dependencies,
|
||||
'attributes': attributes,
|
||||
'scopes': scopes,
|
||||
'run-on-projects': dep_job.attributes.get('run_on_projects'),
|
||||
'treeherder': treeherder,
|
||||
}
|
||||
|
||||
yield task
|
|
@ -10,12 +10,10 @@ from __future__ import absolute_import, print_function, unicode_literals
|
|||
from taskgraph.transforms.base import TransformSequence
|
||||
from taskgraph.util.attributes import copy_attributes_from_dependent_job
|
||||
from taskgraph.util.schema import validate_schema, Schema
|
||||
from taskgraph.util.taskcluster import get_taskcluster_artifact_prefix
|
||||
from taskgraph.transforms.task import task_description_schema
|
||||
from voluptuous import Any, Required, Optional
|
||||
|
||||
_TC_ARTIFACT_LOCATION = \
|
||||
'https://queue.taskcluster.net/v1/task/{task_id}/artifacts/public/build/{postfix}'
|
||||
|
||||
transforms = TransformSequence()
|
||||
|
||||
# Voluptuous uses marker objects as dictionary *keys*, but they are not
|
||||
|
@ -203,8 +201,8 @@ def _generate_task_mozharness_config(build_platform):
|
|||
|
||||
|
||||
def _generate_task_env(build_platform, build_task_ref, signing_task_ref, locale=None):
|
||||
mar_prefix = _generate_taskcluster_prefix(build_task_ref, postfix='host/bin/', locale=None)
|
||||
signed_prefix = _generate_taskcluster_prefix(signing_task_ref, locale=locale)
|
||||
mar_prefix = get_taskcluster_artifact_prefix(build_task_ref, postfix='host/bin/', locale=None)
|
||||
signed_prefix = get_taskcluster_artifact_prefix(signing_task_ref, locale=locale)
|
||||
|
||||
if build_platform.startswith('linux') or build_platform.startswith('macosx'):
|
||||
tarball_extension = 'bz2' if build_platform.startswith('linux') else 'gz'
|
||||
|
@ -231,13 +229,6 @@ def _generate_task_env(build_platform, build_task_ref, signing_task_ref, locale=
|
|||
raise NotImplementedError('Unsupported build_platform: "{}"'.format(build_platform))
|
||||
|
||||
|
||||
def _generate_taskcluster_prefix(task_id, postfix='', locale=None):
|
||||
if locale:
|
||||
postfix = '{}/{}'.format(locale, postfix)
|
||||
|
||||
return _TC_ARTIFACT_LOCATION.format(task_id=task_id, postfix=postfix)
|
||||
|
||||
|
||||
def _generate_task_output_files(build_platform, locale=None):
|
||||
locale_output_path = '{}/'.format(locale) if locale else ''
|
||||
|
||||
|
|
|
@ -129,20 +129,6 @@ def make_repackage_signing_description(config, jobs):
|
|||
'treeherder': treeherder,
|
||||
}
|
||||
|
||||
funsize_platforms = [
|
||||
'linux-nightly',
|
||||
'linux64-nightly',
|
||||
'macosx64-nightly',
|
||||
'win32-nightly',
|
||||
'win64-nightly'
|
||||
]
|
||||
if build_platform in funsize_platforms and is_nightly:
|
||||
route_template = "project.releng.funsize.level-{level}.{project}"
|
||||
task['routes'] = [
|
||||
route_template.format(project=config.params['project'],
|
||||
level=config.params['level'])
|
||||
]
|
||||
|
||||
yield task
|
||||
|
||||
|
||||
|
|
|
@ -515,7 +515,7 @@ GROUP_NAMES = {
|
|||
'tc-L10n-Rpk': 'Localized Repackaged Repacks executed by Taskcluster',
|
||||
'tc-BM-L10n': 'Beetmover for locales executed by Taskcluster',
|
||||
'tc-BMR-L10n': 'Beetmover repackages for locales executed by Taskcluster',
|
||||
'tc-Up': 'Balrog submission of updates, executed by Taskcluster',
|
||||
'c-Up': 'Balrog submission of complete updates',
|
||||
'tc-cs': 'Checksum signing executed by Taskcluster',
|
||||
'tc-rs': 'Repackage signing executed by Taskcluster',
|
||||
'tc-BMcs': 'Beetmover checksums, executed by Taskcluster',
|
||||
|
@ -528,7 +528,10 @@ GROUP_NAMES = {
|
|||
'TW64': 'Toolchain builds for Windows 64-bits',
|
||||
'SM-tc': 'Spidermonkey builds',
|
||||
'pub': 'APK publishing',
|
||||
'p': 'Partial generation',
|
||||
'ps': 'Partials signing',
|
||||
}
|
||||
|
||||
UNKNOWN_GROUP_NAME = "Treeherder group {} has no name; add it to " + __file__
|
||||
|
||||
V2_ROUTE_TEMPLATES = [
|
||||
|
|
|
@ -0,0 +1,193 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import requests
|
||||
import redo
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
BALROG_API_ROOT = 'https://aus5.mozilla.org/api/v1'
|
||||
|
||||
PLATFORM_RENAMES = {
|
||||
'windows2012-32': 'win32',
|
||||
'windows2012-64': 'win64',
|
||||
'osx-cross': 'macosx64',
|
||||
}
|
||||
|
||||
BALROG_PLATFORM_MAP = {
|
||||
"linux": [
|
||||
"Linux_x86-gcc3"
|
||||
],
|
||||
"linux64": [
|
||||
"Linux_x86_64-gcc3"
|
||||
],
|
||||
"macosx64": [
|
||||
"Darwin_x86_64-gcc3-u-i386-x86_64",
|
||||
"Darwin_x86-gcc3-u-i386-x86_64",
|
||||
"Darwin_x86-gcc3",
|
||||
"Darwin_x86_64-gcc3"
|
||||
],
|
||||
"win32": [
|
||||
"WINNT_x86-msvc",
|
||||
"WINNT_x86-msvc-x86",
|
||||
"WINNT_x86-msvc-x64"
|
||||
],
|
||||
"win64": [
|
||||
"WINNT_x86_64-msvc",
|
||||
"WINNT_x86_64-msvc-x64"
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
def get_balrog_platform_name(platform):
|
||||
"""Convert build platform names into balrog platform names"""
|
||||
if '-nightly' in platform:
|
||||
platform = platform.replace('-nightly', '')
|
||||
if '-devedition' in platform:
|
||||
platform = platform.replace('-devedition', '')
|
||||
return PLATFORM_RENAMES.get(platform, platform)
|
||||
|
||||
|
||||
def _sanitize_platform(platform):
|
||||
platform = get_balrog_platform_name(platform)
|
||||
if platform not in BALROG_PLATFORM_MAP:
|
||||
return platform
|
||||
return BALROG_PLATFORM_MAP[platform][0]
|
||||
|
||||
|
||||
def get_builds(release_history, platform, locale):
|
||||
"""Examine cached balrog release history and return the list of
|
||||
builds we need to generate diffs from"""
|
||||
platform = _sanitize_platform(platform)
|
||||
return release_history.get(platform, {}).get(locale, {})
|
||||
|
||||
|
||||
def get_partials_artifacts(release_history, platform, locale):
|
||||
platform = _sanitize_platform(platform)
|
||||
return release_history.get(platform, {}).get(locale, {}).keys()
|
||||
|
||||
|
||||
def get_partials_artifact_map(release_history, platform, locale):
|
||||
platform = _sanitize_platform(platform)
|
||||
return {k: release_history[platform][locale][k]['buildid']
|
||||
for k in release_history.get(platform, {}).get(locale, {})}
|
||||
|
||||
|
||||
def _retry_on_http_errors(url, verify, params, errors):
|
||||
if params:
|
||||
params_str = "&".join("=".join([k, str(v)])
|
||||
for k, v in params.iteritems())
|
||||
else:
|
||||
params_str = ''
|
||||
logger.info("Connecting to %s?%s", url, params_str)
|
||||
for _ in redo.retrier(sleeptime=5, max_sleeptime=30, attempts=10):
|
||||
try:
|
||||
req = requests.get(url, verify=verify, params=params, timeout=4)
|
||||
req.raise_for_status()
|
||||
return req
|
||||
except requests.HTTPError as e:
|
||||
if e.response.status_code in errors:
|
||||
logger.exception("Got HTTP %s trying to reach %s",
|
||||
e.response.status_code, url)
|
||||
else:
|
||||
raise
|
||||
else:
|
||||
raise
|
||||
|
||||
|
||||
def get_sorted_releases(product, branch):
|
||||
"""Returns a list of release names from Balrog.
|
||||
:param product: product name, AKA appName
|
||||
:param branch: branch name, e.g. mozilla-central
|
||||
:return: a sorted list of release names, most recent first.
|
||||
"""
|
||||
url = "{}/releases".format(BALROG_API_ROOT)
|
||||
params = {
|
||||
"product": product,
|
||||
# Adding -nightly-2 (2 stands for the beginning of build ID
|
||||
# based on date) should filter out release and latest blobs.
|
||||
# This should be changed to -nightly-3 in 3000 ;)
|
||||
"name_prefix": "{}-{}-nightly-2".format(product, branch),
|
||||
"names_only": True
|
||||
}
|
||||
req = _retry_on_http_errors(
|
||||
url=url, verify=True, params=params,
|
||||
errors=[500])
|
||||
releases = req.json()["names"]
|
||||
releases = sorted(releases, reverse=True)
|
||||
return releases
|
||||
|
||||
|
||||
def get_release_builds(release):
|
||||
url = "{}/releases/{}".format(BALROG_API_ROOT, release)
|
||||
req = _retry_on_http_errors(
|
||||
url=url, verify=True, params=None,
|
||||
errors=[500])
|
||||
return req.json()
|
||||
|
||||
|
||||
def populate_release_history(product, branch, maxbuilds=4, maxsearch=10):
    """Find relevant releases in Balrog

    Not all releases have all platforms and locales, due
    to Taskcluster migration.

    Args:
        product (str): capitalized product name, AKA appName, e.g. Firefox
        branch (str): branch name (mozilla-central)
        maxbuilds (int): Maximum number of historical releases to populate
        maxsearch(int): Traverse at most this many releases, to avoid
            working through the entire history.
    Returns:
        json object based on data from balrog api

        results = {
            'platform1': {
                'locale1': {
                    'buildid1': mar_url,
                    'buildid2': mar_url,
                    'buildid3': mar_url,
                },
                'locale2': {
                    'target.partial-1.mar': {'buildid1': 'mar_url'},
                }
            },
            'platform2': {
            }
        }
    """
    partial_mar_tmpl = 'target.partial-{}.mar'
    builds = dict()

    for release in get_sorted_releases(product, branch)[:maxsearch]:
        # Stop querying Balrog once every platform/locale we have seen
        # already holds maxbuilds entries.
        have_enough = builds and all(
            len(locale_builds) >= maxbuilds
            for platform_builds in builds.values()
            for locale_builds in platform_builds.values())
        if have_enough:
            break

        platforms = get_release_builds(release)['platforms']
        for platform, platform_info in platforms.items():
            # Alias entries merely point at another platform; skip them.
            if 'alias' in platform_info:
                continue
            platform_builds = builds.setdefault(platform, dict())
            for locale, locale_info in platform_info['locales'].items():
                locale_builds = platform_builds.setdefault(locale, dict())
                if len(locale_builds) >= maxbuilds:
                    continue
                partial_name = partial_mar_tmpl.format(len(locale_builds) + 1)
                locale_builds[partial_name] = {
                    'buildid': locale_info['buildID'],
                    'mar_url': locale_info['completes'][0]['fileUrl'],
                }
    return builds
|
|
@ -13,6 +13,9 @@ from mozbuild.util import memoize
|
|||
from requests.packages.urllib3.util.retry import Retry
|
||||
from requests.adapters import HTTPAdapter
|
||||
|
||||
_TC_ARTIFACT_LOCATION = \
|
||||
'https://queue.taskcluster.net/v1/task/{task_id}/artifacts/public/build/{postfix}'
|
||||
|
||||
|
||||
@memoize
|
||||
def get_session():
|
||||
|
@ -101,3 +104,10 @@ def get_task_url(task_id, use_proxy=False):
|
|||
def get_task_definition(task_id, use_proxy=False):
    """Fetch a task's JSON definition from the queue and decode it."""
    task_url = get_task_url(task_id, use_proxy)
    return _do_request(task_url).json()
|
||||
|
||||
|
||||
def get_taskcluster_artifact_prefix(task_id, postfix='', locale=None):
    """Build the public-artifact URL prefix for a task, optionally
    nesting the postfix under a locale subdirectory."""
    effective_postfix = '{}/{}'.format(locale, postfix) if locale else postfix
    return _TC_ARTIFACT_LOCATION.format(task_id=task_id,
                                        postfix=effective_postfix)
|
||||
|
|
|
@ -0,0 +1,189 @@
|
|||
Releasing geckodriver
|
||||
=====================
|
||||
|
||||
Releasing geckodriver is not as easy as it once used to be when the
|
||||
project’s canonical home was on GitHub. Today geckodriver is hosted
|
||||
in [mozilla-central], and whilst we do want to make future releases
|
||||
from [Mozilla’s CI infrastructure], we are currently in between two
|
||||
worlds: development happens in m-c, but releases continue to be made
|
||||
from GitHub using Travis.
|
||||
|
||||
The reason for this is that we do not compile geckodriver for all
|
||||
our target platforms, that Rust cross-compilation on TaskCluster
|
||||
builders is somewhat broken, and that tests are not run in automation.
|
||||
We intend to fix all these problems.
|
||||
|
||||
In any case, the steps to release geckodriver are as follows:
|
||||
|
||||
[mozilla-central]: https://hg.mozilla.org/mozilla-central/
|
||||
[Mozilla’s CI infrastructure]: https://treeherder.mozilla.org/
|
||||
|
||||
|
||||
Release new webdriver crate
|
||||
---------------------------
|
||||
|
||||
geckodriver depends on the [webdriver] crate, also hosted in
|
||||
mozilla-central, by pointing to its in-tree relative path:
|
||||
|
||||
[dependencies]
|
||||
webdriver = { path = "../webdriver" }
|
||||
|
||||
Because we need to export the geckodriver source code to the old GitHub
|
||||
repository in order to release, we need to publish any changes that
|
||||
have been made to webdriver in the interim. If no changes have been
|
||||
made, you can skip these steps:
|
||||
|
||||
1. Bump the version number in testing/webdriver/Cargo.toml
|
||||
2. `cargo publish`
|
||||
|
||||
[webdriver]: ../webdriver
|
||||
|
||||
|
||||
Update the change log
|
||||
---------------------
|
||||
|
||||
Notable changes to geckodriver are mentioned in [CHANGES.md]. Many
|
||||
users rely on this, so it’s important that you make it **relevant
|
||||
to end-users**. For example, we only mention changes that are visible
|
||||
to users. The change log is not a complete anthology of commits,
|
||||
as these often will not convey the essence of a change to end-users.
|
||||
If a feature was added but removed before release, there is no reason
|
||||
to list it as a change.
|
||||
|
||||
It is good practice to also include relevant information from the
|
||||
[webdriver] and [rust-mozrunner] crates, since these are the two most
|
||||
important dependencies of geckodriver and a lot of its functionality
|
||||
is implemented there.
|
||||
|
||||
We follow the writing style of the existing change log, with
|
||||
one section per version (with a release date), with subsections
|
||||
‘Added’, ‘Changed’, and ‘Removed’. If the targeted
|
||||
Firefox or Selenium versions have changed, it is good to make a
|
||||
mention of this. Lines are optimally formatted at roughly 72 columns
|
||||
to make the file readable in a text editor as well as rendered HTML.
|
||||
fmt(1) does a splendid job at text formatting.
|
||||
|
||||
[CHANGES.md]: ../CHANGES.md
|
||||
[webdriver]: ../../webdriver
|
||||
[rust-mozrunner]: https://github.com/jgraham/rust_mozrunner
|
||||
|
||||
|
||||
Update libraries
|
||||
----------------
|
||||
|
||||
Make relevant changes to [Cargo.toml] to upgrade dependencies, then run
|
||||
|
||||
% ./mach vendor rust
|
||||
% ./mach build testing/geckodriver
|
||||
|
||||
to pull down and vendor the upgraded libraries. Remember to check
|
||||
in the [Cargo.lock] file, since we want geckodriver builds to
|
||||
be reproducible.
|
||||
|
||||
Updating dependencies should always be made as a separate commit to
|
||||
not confuse reviewers because vendoring involves checking in a lot
|
||||
of extra code reviewed downstream.
|
||||
|
||||
[Cargo.toml]: ../Cargo.toml
|
||||
[Cargo.lock]: ../Cargo.lock
|
||||
|
||||
|
||||
Bump the version number
|
||||
-----------------------
|
||||
|
||||
Bump the version number in [Cargo.toml] to the next version.
|
||||
geckodriver follows [semantic versioning] so it’s a good idea to
|
||||
familiarise yourself with that before deciding on the version number.
|
||||
|
||||
After you’ve changed the version number, run
|
||||
|
||||
% ./mach build testing/geckodriver
|
||||
|
||||
again to update [Cargo.lock], and check in the file.
|
||||
|
||||
[semantic versioning]: http://semver.org/
|
||||
|
||||
|
||||
Export to GitHub
|
||||
----------------
|
||||
|
||||
The canonical GitHub repository is
|
||||
|
||||
https://github.com/mozilla/geckodriver.git
|
||||
|
||||
so make sure you have a local clone of that. It has three branches:
|
||||
_master_ which only contains the [README]; _old_ which was the
|
||||
state of the project when it was exported to mozilla-central; and
|
||||
_release_, from where releases are made. We will export the contents
|
||||
of [testing/geckodriver] to the latter branch:
|
||||
|
||||
% cd $SRC/geckodriver
|
||||
% git checkout release
|
||||
% git rm -rf .
|
||||
% git clean -fxd
|
||||
% cp -r $SRC/gecko/testing/geckodriver/* .
|
||||
% git add .
|
||||
% git commit -am "import of vX.Y.Z"
|
||||
|
||||
[README]: ../README.md
|
||||
[testing/geckodriver]: ../
|
||||
|
||||
|
||||
Manually change `webdriver` dependency
|
||||
--------------------------------------
|
||||
|
||||
After the source code has been imported we need to change the
|
||||
dependency information for the [webdriver] crate. As explained
|
||||
previously, geckodriver depends on a relative path in the
|
||||
mozilla-central repository to build with the latest unreleased
|
||||
source code.
|
||||
|
||||
This relative path does not exist in the GitHub repository and the
|
||||
build will fail unless we change it to the latest [webdriver] crate
|
||||
version from crates.io. That version will either be the crate you
|
||||
published earlier, or the latest version available if no changes have
|
||||
been made to it since the last geckodriver release.
|
||||
|
||||
|
||||
Tag the release
|
||||
---------------
|
||||
|
||||
Run the following command:
|
||||
|
||||
% git tag -a 'vX.Y.Z'
|
||||
|
||||
Write the following in the annotation:
|
||||
|
||||
Tagging release vX.Y.Z
|
||||
|
||||
|
||||
Make the release
|
||||
----------------
|
||||
|
||||
geckodriver is released and automatically uploaded from Travis by
|
||||
pushing a new version tag to the _release_ branch:
|
||||
|
||||
% git push
|
||||
% git push --tags
|
||||
|
||||
|
||||
Update the release description
|
||||
------------------------------
|
||||
|
||||
Copy the raw Markdown source from [CHANGES.md] into the description
|
||||
of the [latest release]. This will highlight for end-users what
|
||||
changes were made in that particular package when they visit the
|
||||
GitHub downloads section.
|
||||
|
||||
Congratulations! You’ve released geckodriver!
|
||||
|
||||
[latest release]: https://github.com/mozilla/geckodriver/releases
|
||||
|
||||
|
||||
Future work
|
||||
-----------
|
||||
|
||||
In the future, we intend to [sign releases] so that they are
|
||||
verifiable.
|
||||
|
||||
[sign releases]: https://github.com/mozilla/geckodriver/issues/292
|
|
@ -39,7 +39,7 @@ class TestCommandLineArguments(MarionetteTestCase):
|
|||
startup_timeout = self.marionette.startup_timeout
|
||||
|
||||
# Use a timeout which always cause an IOError
|
||||
self.marionette.startup_timeout = 1
|
||||
self.marionette.startup_timeout = .1
|
||||
msg = "Process killed after {}s".format(self.marionette.startup_timeout)
|
||||
|
||||
try:
|
||||
|
|
|
@ -2408,6 +2408,9 @@ toolbar#nav-bar {
|
|||
options.e10s = False
|
||||
mozinfo.update({"e10s": options.e10s}) # for test manifest parsing.
|
||||
|
||||
if options.jscov_dir_prefix is not None:
|
||||
mozinfo.update({'coverage': True})
|
||||
|
||||
self.setTestRoot(options)
|
||||
|
||||
# Despite our efforts to clean up servers started by this script, in practice
|
||||
|
|
|
@ -23,7 +23,6 @@
|
|||
"hg_l10n_base": "https://hg.mozilla.org/l10n-central",
|
||||
"hg_l10n_tag": "default",
|
||||
"l10n_dir": "l10n-central",
|
||||
"merge_locales": true,
|
||||
"mozilla_dir": "build",
|
||||
"mozconfig": "build/mobile/android/config/mozconfigs/android-x86/nightly"
|
||||
}
|
||||
|
|
|
@ -23,6 +23,5 @@
|
|||
"hg_l10n_base": "https://hg.mozilla.org/l10n-central",
|
||||
"hg_l10n_tag": "default",
|
||||
"l10n_dir": "l10n-central",
|
||||
"merge_locales": true,
|
||||
"mozilla_dir": "build"
|
||||
}
|
||||
|
|
|
@ -23,7 +23,6 @@
|
|||
"hg_l10n_base": "https://hg.mozilla.org/l10n-central",
|
||||
"hg_l10n_tag": "default",
|
||||
"l10n_dir": "l10n-central",
|
||||
"merge_locales": true,
|
||||
"mozilla_dir": "build",
|
||||
"mozconfig": "build/mobile/android/config/mozconfigs/android-armv6/nightly"
|
||||
}
|
||||
|
|
|
@ -23,7 +23,6 @@
|
|||
"hg_l10n_base": "https://hg.mozilla.org/l10n-central",
|
||||
"hg_l10n_tag": "default",
|
||||
"l10n_dir": "l10n-central",
|
||||
"merge_locales": true,
|
||||
"mozilla_dir": "build",
|
||||
"mozconfig": "build/mobile/android/config/mozconfigs/android-x86/nightly"
|
||||
}
|
||||
|
|
|
@ -23,6 +23,5 @@
|
|||
"hg_l10n_base": "https://hg.mozilla.org/l10n-central",
|
||||
"hg_l10n_tag": "default",
|
||||
"l10n_dir": "l10n-central",
|
||||
"merge_locales": true,
|
||||
"mozilla_dir": "build"
|
||||
}
|
||||
|
|
|
@ -22,7 +22,6 @@
|
|||
"hg_l10n_base": "https://hg.mozilla.org/l10n-central",
|
||||
"hg_l10n_tag": "default",
|
||||
"l10n_dir": "l10n-central",
|
||||
"merge_locales": true,
|
||||
"mozilla_dir": "build",
|
||||
"mozconfig": "build/mobile/android/config/mozconfigs/android-armv6/nightly"
|
||||
}
|
||||
|
|
Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше
Загрузка…
Ссылка в новой задаче