This commit is contained in:
Wes Kocher 2016-08-25 17:14:17 -07:00
Родитель 8998b2ba96 f80822840b
Коммит 309fb2cc5f
171 изменённых файлов: 4369 добавлений и 1594 удалений

Просмотреть файл

@ -1488,6 +1488,7 @@ pref("browser.migrate.automigrate.enabled", false);
// 4 here means the suggestion notification will be automatically
// hidden the 4th day, so it will actually be shown on 3 different days.
pref("browser.migrate.automigrate.daysToOfferUndo", 4);
pref("browser.migrate.automigrate.ui.enabled", true);
// Enable browser frames for use on desktop. Only exposed to chrome callers.
pref("dom.mozBrowserFramesEnabled", true);

Просмотреть файл

@ -77,7 +77,7 @@ function convert(result) {
return node;
}
extensions.registerSchemaAPI("bookmarks", context => {
extensions.registerSchemaAPI("bookmarks", "addon_parent", context => {
return {
bookmarks: {
get: function(idOrIdList) {

Просмотреть файл

@ -365,7 +365,7 @@ extensions.on("shutdown", (type, extension) => {
});
/* eslint-enable mozilla/balanced-listeners */
extensions.registerSchemaAPI("browserAction", context => {
extensions.registerSchemaAPI("browserAction", "addon_parent", context => {
let {extension} = context;
return {
browserAction: {

Просмотреть файл

@ -228,7 +228,7 @@ extensions.on("shutdown", (type, extension) => {
});
/* eslint-enable mozilla/balanced-listeners */
extensions.registerSchemaAPI("commands", context => {
extensions.registerSchemaAPI("commands", "addon_parent", context => {
let {extension} = context;
return {
commands: {

Просмотреть файл

@ -485,7 +485,7 @@ extensions.on("shutdown", (type, extension) => {
});
/* eslint-enable mozilla/balanced-listeners */
extensions.registerSchemaAPI("contextMenus", context => {
extensions.registerSchemaAPI("contextMenus", "addon_parent", context => {
let {extension} = context;
return {
contextMenus: {

Просмотреть файл

@ -130,7 +130,7 @@ function getObserver() {
return _observer;
}
extensions.registerSchemaAPI("history", context => {
extensions.registerSchemaAPI("history", "addon_parent", context => {
return {
history: {
addUrl: function(details) {

Просмотреть файл

@ -217,7 +217,7 @@ PageAction.for = extension => {
global.pageActionFor = PageAction.for;
extensions.registerSchemaAPI("pageAction", context => {
extensions.registerSchemaAPI("pageAction", "addon_parent", context => {
let {extension} = context;
return {
pageAction: {

Просмотреть файл

@ -264,7 +264,7 @@ let tabListener = {
},
};
extensions.registerSchemaAPI("tabs", context => {
extensions.registerSchemaAPI("tabs", "addon_parent", context => {
let {extension} = context;
let self = {
tabs: {

Просмотреть файл

@ -15,7 +15,7 @@ var {
EventManager,
} = ExtensionUtils;
extensions.registerSchemaAPI("windows", context => {
extensions.registerSchemaAPI("windows", "addon_parent", context => {
let {extension} = context;
return {
windows: {

Просмотреть файл

@ -210,7 +210,7 @@ add_task(function* testBrowserActionClickCanceled() {
yield extension.startup();
const {GlobalManager, browserActionFor} = Cu.import("resource://gre/modules/Extension.jsm", {});
const {GlobalManager, Management: {global: {browserActionFor}}} = Cu.import("resource://gre/modules/Extension.jsm", {});
let ext = GlobalManager.extensionMap.get(extension.id);
let browserAction = browserActionFor(ext);

Просмотреть файл

@ -90,7 +90,7 @@ add_task(function* () {
yield Promise.all([extension.startup(), extension.awaitMessage("background-ready")]);
let {WindowManager} = Cu.import("resource://gre/modules/Extension.jsm", {});
let {Management: {global: {WindowManager}}} = Cu.import("resource://gre/modules/Extension.jsm", {});
let winId1 = WindowManager.getId(win1);
let winId2 = WindowManager.getId(win2);

Просмотреть файл

@ -104,7 +104,7 @@ add_task(function* () {
info("started");
let {WindowManager} = Cu.import("resource://gre/modules/Extension.jsm", {});
let {Management: {global: {WindowManager}}} = Cu.import("resource://gre/modules/Extension.jsm", {});
let winId1 = WindowManager.getId(win1);
let winId2 = WindowManager.getId(win2);

Просмотреть файл

@ -176,7 +176,7 @@ add_task(function* () {
});
extension.onMessage("change-tab", (tabId, attr, on) => {
let {TabManager} = Cu.import("resource://gre/modules/Extension.jsm", {});
let {Management: {global: {TabManager}}} = Cu.import("resource://gre/modules/Extension.jsm", {});
let tab = TabManager.getTab(tabId);

Просмотреть файл

@ -98,7 +98,7 @@ add_task(function* testDuplicateTabLazily() {
});
extension.onMessage("duplicate-tab", tabId => {
let {TabManager} = Cu.import("resource://gre/modules/Extension.jsm", {});
let {Management: {global: {TabManager}}} = Cu.import("resource://gre/modules/Extension.jsm", {});
let tab = TabManager.getTab(tabId);
// This is a bit of a hack to load a tab in the background.

Просмотреть файл

@ -201,7 +201,7 @@ add_task(function* () {
});
extension.onMessage("msg", (id, msg, ...args) => {
let {TabManager} = Cu.import("resource://gre/modules/Extension.jsm", {});
let {Management: {global: {TabManager}}} = Cu.import("resource://gre/modules/Extension.jsm", {});
let resp;
if (msg == "get-zoom") {

Просмотреть файл

@ -53,7 +53,7 @@ add_task(function* testWindowsEvents() {
yield extension.startup();
yield extension.awaitMessage("ready");
let {WindowManager} = Cu.import("resource://gre/modules/Extension.jsm", {});
let {Management: {global: {WindowManager}}} = Cu.import("resource://gre/modules/Extension.jsm", {});
let currentWindow = window;
let currentWindowId = WindowManager.getId(currentWindow);

Просмотреть файл

@ -9,6 +9,7 @@ this.EXPORTED_SYMBOLS = ["AutoMigrate"];
const { classes: Cc, interfaces: Ci, results: Cr, utils: Cu } = Components;
const kAutoMigrateEnabledPref = "browser.migrate.automigrate.enabled";
const kUndoUIEnabledPref = "browser.migrate.automigrate.ui.enabled";
const kAutoMigrateStartedPref = "browser.migrate.automigrate.started";
const kAutoMigrateFinishedPref = "browser.migrate.automigrate.finished";
@ -286,7 +287,8 @@ const AutoMigrate = {
maybeShowUndoNotification(target) {
// The tab might have navigated since we requested the undo state:
if (!this.canUndo() || target.currentURI.spec != "about:home") {
if (!this.canUndo() || target.currentURI.spec != "about:home" ||
!Preferences.get(kUndoUIEnabledPref, false)) {
return;
}

Просмотреть файл

@ -2390,7 +2390,13 @@ BrowserGlue.prototype = {
win.gBrowser.selectedTab = firstTab;
}
}
AlertsService.showAlertNotification(null, title, body, true, null, clickCallback);
// Specify an icon because on Windows no icon is shown at the moment
let imageURL;
if (AppConstants.platform == "win") {
imageURL = "chrome://branding/content/icon64.png";
}
AlertsService.showAlertNotification(imageURL, title, body, true, null, clickCallback);
} catch (ex) {
Cu.reportError("Error displaying tab(s) received by Sync: " + ex);
}

Просмотреть файл

@ -884,8 +884,7 @@ this.PlacesUIUtils = {
/**
* Gives the user a chance to cancel loading lots of tabs at once
*/
_confirmOpenInTabs:
function PUIU__confirmOpenInTabs(numTabsToOpen, aWindow) {
confirmOpenInTabs(numTabsToOpen, aWindow) {
const WARN_ON_OPEN_PREF = "browser.tabs.warnOnOpen";
var reallyOpen = true;
@ -988,7 +987,7 @@ this.PlacesUIUtils = {
urlsToOpen.push({uri: node.uri, isBookmark: false});
}
if (this._confirmOpenInTabs(urlsToOpen.length, window)) {
if (this.confirmOpenInTabs(urlsToOpen.length, window)) {
this._openTabset(urlsToOpen, aEvent, window);
}
}, Cu.reportError);
@ -999,7 +998,7 @@ this.PlacesUIUtils = {
let window = aView.ownerWindow;
let urlsToOpen = PlacesUtils.getURLsForContainerNode(aNode);
if (this._confirmOpenInTabs(urlsToOpen.length, window)) {
if (this.confirmOpenInTabs(urlsToOpen.length, window)) {
this._openTabset(urlsToOpen, aEvent, window);
}
},

Просмотреть файл

@ -13,6 +13,8 @@ let log = Cu.import("resource://gre/modules/Log.jsm", {})
XPCOMUtils.defineLazyModuleGetter(this, "BrowserUITelemetry",
"resource:///modules/BrowserUITelemetry.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "PlacesUIUtils",
"resource:///modules/PlacesUIUtils.jsm");
this.EXPORTED_SYMBOLS = [
"TabListComponent"
@ -47,6 +49,7 @@ TabListComponent.prototype = {
this._view = new this._View(this._window, {
onSelectRow: (...args) => this.onSelectRow(...args),
onOpenTab: (...args) => this.onOpenTab(...args),
onOpenTabs: (...args) => this.onOpenTabs(...args),
onMoveSelectionDown: (...args) => this.onMoveSelectionDown(...args),
onMoveSelectionUp: (...args) => this.onMoveSelectionUp(...args),
onToggleBranch: (...args) => this.onToggleBranch(...args),
@ -113,6 +116,21 @@ TabListComponent.prototype = {
BrowserUITelemetry.countSyncedTabEvent("open", "sidebar");
},
onOpenTabs(urls, where, params) {
if (!PlacesUIUtils.confirmOpenInTabs(urls.length, this._window)) {
return;
}
if (where == "window") {
this._window.openDialog(this._window.getBrowserURL(), "_blank",
"chrome,dialog=no,all", urls.join("|"));
} else {
for (let url of urls) {
this._window.openUILinkIn(url, where, params);
}
}
BrowserUITelemetry.countSyncedTabEvent("openmultiple", "sidebar");
},
onCopyTabLocation(url) {
this._clipboardHelper.copyString(url);
},

Просмотреть файл

@ -185,6 +185,7 @@ TabListView.prototype = {
// These listeners have to be re-created every time since we re-create the list
_attachListListeners() {
this.list.addEventListener("click", this.onClick.bind(this));
this.list.addEventListener("mouseup", this.onMouseUp.bind(this));
this.list.addEventListener("keydown", this.onKeyDown.bind(this));
},
@ -263,6 +264,12 @@ TabListView.prototype = {
}
},
onMouseUp(event) {
if (event.which == 2) { // Middle click
this.onClick(event);
}
},
onClick(event) {
let itemNode = this._findParentItemNode(event.target);
if (!itemNode) {
@ -276,7 +283,18 @@ TabListView.prototype = {
}
}
if (event.target.classList.contains("item-twisty-container")) {
// Middle click on a client
if (itemNode.classList.contains("client")) {
let where = getChromeWindow(this._window).whereToOpenLink(event);
if (where != "current") {
const tabs = itemNode.querySelector(".item-tabs-list").childNodes;
const urls = [...tabs].map(tab => tab.dataset.url);
this.props.onOpenTabs(urls, where, {});
}
}
if (event.target.classList.contains("item-twisty-container")
&& event.which != 2) {
this.props.onToggleBranch(itemNode.dataset.id);
return;
}

Просмотреть файл

@ -8,6 +8,7 @@ let { View } = Cu.import("resource:///modules/syncedtabs/TabListView.js", {});
const ACTION_METHODS = [
"onSelectRow",
"onOpenTab",
"onOpenTabs",
"onMoveSelectionDown",
"onMoveSelectionUp",
"onToggleBranch",
@ -77,6 +78,8 @@ add_task(function* testActions() {
},
PlacesUtils: { bookmarksMenuFolderId: "id" }
},
getBrowserURL() {},
openDialog() {},
openUILinkIn() {}
};
let component = new TabListComponent({
@ -124,6 +127,13 @@ add_task(function* testActions() {
component.onOpenTab("uri", "where", "params");
Assert.ok(windowMock.openUILinkIn.calledWith("uri", "where", "params"));
component.onOpenTabs(["uri1", "uri2"], "where", "params");
Assert.ok(windowMock.openUILinkIn.calledWith("uri1", "where", "params"));
Assert.ok(windowMock.openUILinkIn.calledWith("uri2", "where", "params"));
sinon.spy(windowMock, "openDialog");
component.onOpenTabs(["uri1", "uri2"], "window", "params");
Assert.deepEqual(windowMock.openDialog.args[0][3], ["uri1", "uri2"].join("|"));
sinon.spy(clipboardHelperMock, "copyString");
component.onCopyTabLocation("uri");
Assert.ok(clipboardHelperMock.copyString.calledWith("uri"));

Просмотреть файл

@ -358,8 +358,8 @@ ArrowScrollBox.prototype = {
function HTMLBreadcrumbs(inspector) {
this.inspector = inspector;
this.selection = this.inspector.selection;
this.chromeWin = this.inspector.panelWin;
this.chromeDoc = this.inspector.panelDoc;
this.win = this.inspector.panelWin;
this.doc = this.inspector.panelDoc;
this._init();
}
@ -371,9 +371,9 @@ HTMLBreadcrumbs.prototype = {
},
_init: function () {
this.outer = this.chromeDoc.getElementById("inspector-breadcrumbs");
this.outer = this.doc.getElementById("inspector-breadcrumbs");
this.arrowScrollBox = new ArrowScrollBox(
this.chromeWin,
this.win,
this.outer);
this.container = this.arrowScrollBox.inner;
@ -382,7 +382,7 @@ HTMLBreadcrumbs.prototype = {
// These separators are used for CSS purposes only, and are positioned
// off screen, but displayed with -moz-element.
this.separators = this.chromeDoc.createElementNS(NS_XHTML, "div");
this.separators = this.doc.createElementNS(NS_XHTML, "div");
this.separators.className = "breadcrumb-separator-container";
this.separators.innerHTML =
"<div id='breadcrumb-separator-before'></div>" +
@ -395,7 +395,7 @@ HTMLBreadcrumbs.prototype = {
this.outer.addEventListener("mouseout", this, true);
this.outer.addEventListener("focus", this, true);
this.shortcuts = new KeyShortcuts({ window: this.chromeWin, target: this.outer });
this.shortcuts = new KeyShortcuts({ window: this.win, target: this.outer });
this.handleShortcut = this.handleShortcut.bind(this);
this.shortcuts.on("Right", this.handleShortcut);
@ -458,16 +458,16 @@ HTMLBreadcrumbs.prototype = {
* @returns {DocumentFragment}
*/
prettyPrintNodeAsXHTML: function (node) {
let tagLabel = this.chromeDoc.createElementNS(NS_XHTML, "span");
let tagLabel = this.doc.createElementNS(NS_XHTML, "span");
tagLabel.className = "breadcrumbs-widget-item-tag plain";
let idLabel = this.chromeDoc.createElementNS(NS_XHTML, "span");
let idLabel = this.doc.createElementNS(NS_XHTML, "span");
idLabel.className = "breadcrumbs-widget-item-id plain";
let classesLabel = this.chromeDoc.createElementNS(NS_XHTML, "span");
let classesLabel = this.doc.createElementNS(NS_XHTML, "span");
classesLabel.className = "breadcrumbs-widget-item-classes plain";
let pseudosLabel = this.chromeDoc.createElementNS(NS_XHTML, "span");
let pseudosLabel = this.doc.createElementNS(NS_XHTML, "span");
pseudosLabel.className = "breadcrumbs-widget-item-pseudo-classes plain";
let tagText = node.displayName;
@ -506,7 +506,7 @@ HTMLBreadcrumbs.prototype = {
classesLabel.textContent = classesText;
pseudosLabel.textContent = node.pseudoClassLocks.join("");
let fragment = this.chromeDoc.createDocumentFragment();
let fragment = this.doc.createDocumentFragment();
fragment.appendChild(tagLabel);
fragment.appendChild(idLabel);
fragment.appendChild(classesLabel);
@ -661,9 +661,6 @@ HTMLBreadcrumbs.prototype = {
}
if (index > -1) {
this.nodeHierarchy[index].button.setAttribute("checked", "true");
if (this.hadFocus) {
this.nodeHierarchy[index].button.focus();
}
} else {
// Unset active active descendant when all buttons are unselected.
this.outer.removeAttribute("aria-activedescendant");
@ -703,7 +700,7 @@ HTMLBreadcrumbs.prototype = {
* @return {DOMNode} The <button> for this node.
*/
buildButton: function (node) {
let button = this.chromeDoc.createElementNS(NS_XHTML, "button");
let button = this.doc.createElementNS(NS_XHTML, "button");
button.appendChild(this.prettyPrintNodeAsXHTML(node));
button.className = "breadcrumbs-widget-item";
button.id = "breadcrumbs-widget-item-" + this.breadcrumbsWidgetItemId++;
@ -731,7 +728,7 @@ HTMLBreadcrumbs.prototype = {
* @param {NodeFront} node The node to reach.
*/
expand: function (node) {
let fragment = this.chromeDoc.createDocumentFragment();
let fragment = this.doc.createDocumentFragment();
let lastButtonInserted = null;
let originalLength = this.nodeHierarchy.length;
let stopNode = null;
@ -856,10 +853,6 @@ HTMLBreadcrumbs.prototype = {
return;
}
let cmdDispatcher = this.chromeDoc.commandDispatcher;
this.hadFocus = (cmdDispatcher.focusedElement &&
cmdDispatcher.focusedElement.parentNode == this.container);
if (!this.selection.isConnected()) {
// remove all the crumbs
this.cutAfter(-1);

Просмотреть файл

@ -143,6 +143,7 @@ KeyShortcuts.parseElectronKey = function (window, str) {
shortcut.keyCode = KeyCodes[key];
// Used only to stringify the shortcut
shortcut.keyCodeString = key;
shortcut.key = key;
} else {
console.error("Unsupported key:", key);
return null;
@ -199,11 +200,17 @@ KeyShortcuts.prototype = {
}
if (shortcut.keyCode) {
return event.keyCode == shortcut.keyCode;
} else if (event.key in ElectronKeysMapping) {
return ElectronKeysMapping[event.key] === shortcut.key;
}
// get the key from the keyCode if key is not provided.
let key = event.key || String.fromCharCode(event.keyCode);
// For character keys, we match if the final character is the expected one.
// But for digits we also accept indirect match to please azerty keyboard,
// which requires Shift to be pressed to get digits.
return event.key.toLowerCase() == shortcut.key ||
return key.toLowerCase() == shortcut.key ||
(shortcut.key.match(/[0-9]/) &&
event.keyCode == shortcut.key.charCodeAt(0));
},

Просмотреть файл

@ -10,6 +10,7 @@ add_task(function* () {
yield testSimple(shortcuts);
yield testNonLetterCharacter(shortcuts);
yield testPlusCharacter(shortcuts);
yield testFunctionKey(shortcuts);
yield testMixup(shortcuts);
yield testLooseDigits(shortcuts);
yield testExactModifiers(shortcuts);
@ -66,6 +67,17 @@ function testNonLetterCharacter(shortcuts) {
yield onKey;
}
function testFunctionKey(shortcuts) {
info("Test function key shortcuts");
let onKey = once(shortcuts, "F12", (key, event) => {
is(event.key, "F12");
});
EventUtils.synthesizeKey("F12", { keyCode: 123 }, window);
yield onKey;
}
// Plus is special. It's keycode is the one for "=". That's because it requires
// shift to be pressed and is behind "=" key. So it should be considered as a
// character key

Просмотреть файл

@ -20,7 +20,7 @@ loader.lazyRequireGetter(this, "ObjectClient", "devtools/shared/client/main", tr
const { extend } = require("sdk/core/heritage");
const XHTML_NS = "http://www.w3.org/1999/xhtml";
const XUL_NS = "http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul";
const STRINGS_URI = "chrome://devtools/locale/webconsole.properties";
const STRINGS_URI = "devtools/locale/webconsole.properties";
const WebConsoleUtils = require("devtools/client/webconsole/utils").Utils;
const { getSourceNames } = require("devtools/client/shared/source-utils");

Просмотреть файл

@ -22,7 +22,7 @@ loader.lazyRequireGetter(this, "DebuggerClient", "devtools/shared/client/main",
loader.lazyRequireGetter(this, "showDoorhanger", "devtools/client/shared/doorhanger", true);
loader.lazyRequireGetter(this, "viewSource", "devtools/client/shared/view-source");
const STRINGS_URI = "chrome://devtools/locale/webconsole.properties";
const STRINGS_URI = "devtools/locale/webconsole.properties";
var l10n = new WebConsoleUtils.L10n(STRINGS_URI);
const BROWSER_CONSOLE_WINDOW_FEATURES = "chrome,titlebar,toolbar,centerscreen,resizable,dialog=no";

Просмотреть файл

@ -27,7 +27,7 @@ loader.lazyImporter(this, "VariablesView", "resource://devtools/client/shared/wi
loader.lazyImporter(this, "VariablesViewController", "resource://devtools/client/shared/widgets/VariablesViewController.jsm");
loader.lazyRequireGetter(this, "gDevTools", "devtools/client/framework/devtools", true);
const STRINGS_URI = "chrome://devtools/locale/webconsole.properties";
const STRINGS_URI = "devtools/locale/webconsole.properties";
var l10n = new WebConsoleUtils.L10n(STRINGS_URI);
// Constants used for defining the direction of JSTerm input history navigation.

Просмотреть файл

@ -19,9 +19,8 @@ const NetRequest = require("./net-request");
const { loadSheet } = require("sdk/stylesheet/utils");
// Localization
const { Services } = Cu.import("resource://gre/modules/Services.jsm", {});
var networkStrings = Services.strings.createBundle(
"chrome://devtools/locale/netmonitor.properties");
const {LocalizationHelper} = require("devtools/client/shared/l10n");
const L10N = new LocalizationHelper("devtools/locale/netmonitor.properties");
// Stylesheets
var styleSheets = [
@ -55,7 +54,7 @@ styleSheets.forEach(url => {
this.Locale = {
$STR: key => {
try {
return networkStrings.GetStringFromName(key);
return L10N.getStr(key);
} catch (err) {
console.error(key + ": " + err);
}

Просмотреть файл

@ -7,7 +7,7 @@
"use strict";
const WebConsoleUtils = require("devtools/client/webconsole/utils").Utils;
const STRINGS_URI = "chrome://devtools/locale/webconsole.properties";
const STRINGS_URI = "devtools/locale/webconsole.properties";
const l10n = new WebConsoleUtils.L10n(STRINGS_URI);
const {

Просмотреть файл

@ -37,8 +37,7 @@ const SEVERITY_LOG = 3;
// The indent of a console group in pixels.
const GROUP_INDENT = 12;
const WEBCONSOLE_STRINGS_URI = "chrome://devtools/locale/" +
"webconsole.properties";
const WEBCONSOLE_STRINGS_URI = "devtools/locale/webconsole.properties";
var WCUL10n = new WebConsoleUtils.L10n(WEBCONSOLE_STRINGS_URI);
const DOCS_GA_PARAMS = "?utm_source=mozilla" +

Просмотреть файл

@ -8,6 +8,7 @@
const {Cc, Ci, Cu, components} = require("chrome");
const Services = require("Services");
const {LocalizationHelper} = require("devtools/client/shared/l10n");
// Match the function name from the result of toString() or toSource().
//
@ -335,19 +336,10 @@ exports.Utils = WebConsoleUtils;
// ////////////////////////////////////////////////////////////////////////
WebConsoleUtils.L10n = function (bundleURI) {
this._bundleUri = bundleURI;
this._helper = new LocalizationHelper(bundleURI);
};
WebConsoleUtils.L10n.prototype = {
_stringBundle: null,
get stringBundle() {
if (!this._stringBundle) {
this._stringBundle = Services.strings.createBundle(this._bundleUri);
}
return this._stringBundle;
},
/**
* Generates a formatted timestamp string for displaying in console messages.
*
@ -375,14 +367,12 @@ WebConsoleUtils.L10n.prototype = {
* The localized string.
*/
getStr: function (name) {
let result;
try {
result = this.stringBundle.GetStringFromName(name);
return this._helper.getStr(name);
} catch (ex) {
console.error("Failed to get string: " + name);
throw ex;
}
return result;
},
/**
@ -397,14 +387,11 @@ WebConsoleUtils.L10n.prototype = {
* The formatted local string.
*/
getFormatStr: function (name, array) {
let result;
try {
result = this.stringBundle.formatStringFromName(name, array,
array.length);
return this._helper.getFormatStr(name, ...array);
} catch (ex) {
console.error("Failed to format string: " + name);
throw ex;
}
return result;
},
};

Просмотреть файл

@ -39,7 +39,7 @@ loader.lazyImporter(this, "PluralForm", "resource://gre/modules/PluralForm.jsm")
loader.lazyRequireGetter(this, "KeyShortcuts", "devtools/client/shared/key-shortcuts", true);
loader.lazyRequireGetter(this, "ZoomKeys", "devtools/client/shared/zoom-keys");
const STRINGS_URI = "chrome://devtools/locale/webconsole.properties";
const STRINGS_URI = "devtools/locale/webconsole.properties";
var l10n = new WebConsoleUtils.L10n(STRINGS_URI);
const XHTML_NS = "http://www.w3.org/1999/xhtml";

Просмотреть файл

@ -87,19 +87,13 @@ namespace {
// ---------------------------------------------------------------------------
/* static */ already_AddRefed<Animation>
Animation::Constructor(const GlobalObject& aGlobal,
KeyframeEffectReadOnly* aEffect,
AnimationEffectReadOnly* aEffect,
const Optional<AnimationTimeline*>& aTimeline,
ErrorResult& aRv)
{
nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(aGlobal.GetAsSupports());
RefPtr<Animation> animation = new Animation(global);
if (!aEffect) {
// Bug 1049975: We do not support null effect yet.
aRv.Throw(NS_ERROR_DOM_ANIM_NO_EFFECT_ERR);
return nullptr;
}
AnimationTimeline* timeline;
if (aTimeline.WasPassed()) {
timeline = aTimeline.Value();
@ -114,7 +108,7 @@ Animation::Constructor(const GlobalObject& aGlobal,
}
animation->SetTimelineNoUpdate(timeline);
animation->SetEffect(aEffect);
animation->SetEffectNoUpdate(aEffect);
return animation.forget();
}
@ -130,19 +124,90 @@ Animation::SetId(const nsAString& aId)
}
void
Animation::SetEffect(KeyframeEffectReadOnly* aEffect)
Animation::SetEffect(AnimationEffectReadOnly* aEffect)
{
SetEffectNoUpdate(aEffect);
PostUpdate();
}
// https://w3c.github.io/web-animations/#setting-the-target-effect
void
Animation::SetEffectNoUpdate(AnimationEffectReadOnly* aEffect)
{
RefPtr<Animation> kungFuDeathGrip(this);
if (mEffect == aEffect) {
return;
}
AutoMutationBatchForAnimation mb(*this);
bool wasRelevant = mIsRelevant;
if (mEffect) {
mEffect->SetAnimation(nullptr);
if (!aEffect) {
// If the new effect is null, call ResetPendingTasks before clearing
// mEffect since ResetPendingTasks needs it to get the appropriate
// PendingAnimationTracker.
ResetPendingTasks();
}
// We need to notify observers now because once we set mEffect to null
// we won't be able to find the target element to notify.
if (mIsRelevant) {
nsNodeUtils::AnimationRemoved(this);
}
// Break links with the old effect and then drop it.
RefPtr<AnimationEffectReadOnly> oldEffect = mEffect;
mEffect = nullptr;
oldEffect->SetAnimation(nullptr);
// The following will not do any notification because mEffect is null.
UpdateRelevance();
}
mEffect = aEffect;
if (mEffect) {
if (aEffect) {
// Break links from the new effect to its previous animation, if any.
RefPtr<AnimationEffectReadOnly> newEffect = aEffect;
Animation* prevAnim = aEffect->GetAnimation();
if (prevAnim) {
prevAnim->SetEffect(nullptr);
}
// Create links with the new effect.
mEffect = newEffect;
mEffect->SetAnimation(this);
// Update relevance and then notify possible add or change.
// If the target is different, the change notification will be ignored by
// AutoMutationBatchForAnimation.
UpdateRelevance();
if (wasRelevant && mIsRelevant) {
nsNodeUtils::AnimationChanged(this);
}
// Reschedule pending pause or pending play tasks.
// If we have a pending animation, it will either be registered
// in the pending animation tracker and have a null pending ready time,
// or, after it has been painted, it will be removed from the tracker
// and assigned a pending ready time.
// After updating the effect we'll typically need to repaint so if we've
// already been assigned a pending ready time, we should clear it and put
// the animation back in the tracker.
if (!mPendingReadyTime.IsNull()) {
mPendingReadyTime.SetNull();
nsIDocument* doc = GetRenderedDocument();
if (doc) {
PendingAnimationTracker* tracker =
doc->GetOrCreatePendingAnimationTracker();
if (mPendingState == PendingState::PlayPending) {
tracker->AddPlayPending(*this);
} else {
tracker->AddPausePending(*this);
}
}
}
}
UpdateTiming(SeekFlag::NoSeek, SyncNotifyFlag::Async);
@ -523,15 +588,20 @@ Animation::Tick()
if (IsPossiblyOrphanedPendingAnimation()) {
MOZ_ASSERT(mTimeline && !mTimeline->GetCurrentTime().IsNull(),
"Orphaned pending animtaions should have an active timeline");
"Orphaned pending animations should have an active timeline");
FinishPendingAt(mTimeline->GetCurrentTime().Value());
}
UpdateTiming(SeekFlag::NoSeek, SyncNotifyFlag::Async);
if (!mEffect) {
return;
}
// Update layers if we are newly finished.
if (mEffect &&
!mEffect->Properties().IsEmpty() &&
KeyframeEffectReadOnly* keyframeEffect = mEffect->AsKeyframeEffect();
if (keyframeEffect &&
!keyframeEffect->Properties().IsEmpty() &&
!mFinishedAtLastComposeStyle &&
PlayState() == AnimationPlayState::Finished) {
PostUpdate();
@ -671,12 +741,7 @@ Animation::SilentlySetPlaybackRate(double aPlaybackRate)
void
Animation::CancelNoUpdate()
{
if (mPendingState != PendingState::NotPending) {
CancelPendingTasks();
if (mReady) {
mReady->MaybeReject(NS_ERROR_DOM_ABORT_ERR);
}
}
ResetPendingTasks();
if (mFinished) {
mFinished->MaybeReject(NS_ERROR_DOM_ABORT_ERR);
@ -832,7 +897,10 @@ Animation::ComposeStyle(RefPtr<AnimValuesStyleRule>& aStyleRule,
}
}
mEffect->ComposeStyle(aStyleRule, aSetProperties);
KeyframeEffectReadOnly* keyframeEffect = mEffect->AsKeyframeEffect();
if (keyframeEffect) {
keyframeEffect->ComposeStyle(aStyleRule, aSetProperties);
}
}
MOZ_ASSERT(playState == PlayState(),
@ -1105,7 +1173,11 @@ Animation::UpdateEffect()
{
if (mEffect) {
UpdateRelevance();
mEffect->NotifyAnimationTimingUpdated();
KeyframeEffectReadOnly* keyframeEffect = mEffect->AsKeyframeEffect();
if (keyframeEffect) {
keyframeEffect->NotifyAnimationTimingUpdated();
}
}
}
@ -1121,16 +1193,25 @@ Animation::FlushStyle() const
void
Animation::PostUpdate()
{
nsPresContext* presContext = GetPresContext();
if (!presContext) {
if (!mEffect) {
return;
}
Maybe<NonOwningAnimationTarget> target = mEffect->GetTarget();
KeyframeEffectReadOnly* keyframeEffect = mEffect->AsKeyframeEffect();
if (!keyframeEffect) {
return;
}
Maybe<NonOwningAnimationTarget> target = keyframeEffect->GetTarget();
if (!target) {
return;
}
nsPresContext* presContext = keyframeEffect->GetPresContext();
if (!presContext) {
return;
}
presContext->EffectCompositor()
->RequestRestyle(target->mElement,
target->mPseudoType,
@ -1161,6 +1242,20 @@ Animation::CancelPendingTasks()
mPendingReadyTime.SetNull();
}
// https://w3c.github.io/web-animations/#reset-an-animations-pending-tasks
void
Animation::ResetPendingTasks()
{
if (mPendingState == PendingState::NotPending) {
return;
}
CancelPendingTasks();
if (mReady) {
mReady->MaybeReject(NS_ERROR_DOM_ABORT_ERR);
}
}
bool
Animation::IsPossiblyOrphanedPendingAnimation() const
{
@ -1228,21 +1323,11 @@ Animation::EffectEnd() const
nsIDocument*
Animation::GetRenderedDocument() const
{
if (!mEffect) {
if (!mEffect || !mEffect->AsKeyframeEffect()) {
return nullptr;
}
return mEffect->GetRenderedDocument();
}
nsPresContext*
Animation::GetPresContext() const
{
if (!mEffect) {
return nullptr;
}
return mEffect->GetPresContext();
return mEffect->AsKeyframeEffect()->GetRenderedDocument();
}
void
@ -1314,7 +1399,9 @@ Animation::DispatchPlaybackEvent(const nsAString& aName)
bool
Animation::IsRunningOnCompositor() const
{
return mEffect && mEffect->IsRunningOnCompositor();
return mEffect &&
mEffect->AsKeyframeEffect() &&
mEffect->AsKeyframeEffect()->IsRunningOnCompositor();
}
} // namespace dom

Просмотреть файл

@ -92,13 +92,13 @@ public:
// Animation interface methods
static already_AddRefed<Animation>
Constructor(const GlobalObject& aGlobal,
KeyframeEffectReadOnly* aEffect,
AnimationEffectReadOnly* aEffect,
const Optional<AnimationTimeline*>& aTimeline,
ErrorResult& aRv);
void GetId(nsAString& aResult) const { aResult = mId; }
void SetId(const nsAString& aId);
KeyframeEffectReadOnly* GetEffect() const { return mEffect; }
void SetEffect(KeyframeEffectReadOnly* aEffect);
AnimationEffectReadOnly* GetEffect() const { return mEffect; }
void SetEffect(AnimationEffectReadOnly* aEffect);
AnimationTimeline* GetTimeline() const { return mTimeline; }
void SetTimeline(AnimationTimeline* aTimeline);
Nullable<TimeDuration> GetStartTime() const { return mStartTime; }
@ -146,6 +146,7 @@ public:
virtual void CancelFromStyle() { CancelNoUpdate(); }
void SetTimelineNoUpdate(AnimationTimeline* aTimeline);
void SetEffectNoUpdate(AnimationEffectReadOnly* aEffect);
virtual void Tick();
bool NeedsTicks() const
@ -370,14 +371,19 @@ protected:
*/
void CancelPendingTasks();
/**
* Performs the same steps as CancelPendingTasks and also rejects and
* recreates the ready promise if the animation was pending.
*/
void ResetPendingTasks();
bool IsPossiblyOrphanedPendingAnimation() const;
StickyTimeDuration EffectEnd() const;
nsIDocument* GetRenderedDocument() const;
nsPresContext* GetPresContext() const;
RefPtr<AnimationTimeline> mTimeline;
RefPtr<KeyframeEffectReadOnly> mEffect;
RefPtr<AnimationEffectReadOnly> mEffect;
// The beginning of the delay period.
Nullable<TimeDuration> mStartTime; // Timeline timescale
Nullable<TimeDuration> mHoldTime; // Animation timescale

Просмотреть файл

@ -6,11 +6,27 @@
#include "mozilla/dom/AnimationEffectReadOnly.h"
#include "mozilla/dom/AnimationEffectReadOnlyBinding.h"
#include "mozilla/AnimationUtils.h"
#include "mozilla/FloatingPoint.h"
namespace mozilla {
namespace dom {
NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(AnimationEffectReadOnly, mDocument)
NS_IMPL_CYCLE_COLLECTION_CLASS(AnimationEffectReadOnly)
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(AnimationEffectReadOnly)
if (tmp->mTiming) {
tmp->mTiming->Unlink();
}
NS_IMPL_CYCLE_COLLECTION_UNLINK(mDocument, mTiming, mAnimation)
NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(AnimationEffectReadOnly)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mDocument, mTiming, mAnimation)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_SCRIPT_OBJECTS
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
NS_IMPL_CYCLE_COLLECTION_TRACE_WRAPPERCACHE(AnimationEffectReadOnly)
NS_IMPL_CYCLE_COLLECTING_ADDREF(AnimationEffectReadOnly)
NS_IMPL_CYCLE_COLLECTING_RELEASE(AnimationEffectReadOnly)
@ -20,5 +36,313 @@ NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(AnimationEffectReadOnly)
NS_INTERFACE_MAP_ENTRY(nsISupports)
NS_INTERFACE_MAP_END
// Constructs an effect whose wrapper parent is |aDocument| and whose timing
// is provided by |aTiming|, which must be non-null.
AnimationEffectReadOnly::AnimationEffectReadOnly(
  nsIDocument* aDocument, AnimationEffectTimingReadOnly* aTiming)
  : mDocument(aDocument)
  , mTiming(aTiming)
{
  // A null timing object would break SpecifiedTiming() and friends.
  MOZ_ASSERT(aTiming);
}
// https://w3c.github.io/web-animations/#in-play
bool
AnimationEffectReadOnly::IsInPlay() const
{
if (!mAnimation || mAnimation->PlayState() == AnimationPlayState::Finished) {
return false;
}
return GetComputedTiming().mPhase == ComputedTiming::AnimationPhase::Active;
}
// https://w3c.github.io/web-animations/#current
bool
AnimationEffectReadOnly::IsCurrent() const
{
if (!mAnimation || mAnimation->PlayState() == AnimationPlayState::Finished) {
return false;
}
ComputedTiming computedTiming = GetComputedTiming();
return computedTiming.mPhase == ComputedTiming::AnimationPhase::Before ||
computedTiming.mPhase == ComputedTiming::AnimationPhase::Active;
}
// https://w3c.github.io/web-animations/#in-effect
bool
AnimationEffectReadOnly::IsInEffect() const
{
ComputedTiming computedTiming = GetComputedTiming();
return !computedTiming.mProgress.IsNull();
}
// Returns a strong (already-addrefed) reference to this effect's timing
// object for the WebIDL |timing| attribute.
already_AddRefed<AnimationEffectTimingReadOnly>
AnimationEffectReadOnly::Timing()
{
  RefPtr<AnimationEffectTimingReadOnly> timing = mTiming;
  return timing.forget();
}
// Replaces this effect's specified timing, notifying the associated
// animation (if any) when the timing actually changes.
void
AnimationEffectReadOnly::SetSpecifiedTiming(const TimingParams& aTiming)
{
  // Skip the update (and the notification below) when nothing changed.
  if (mTiming->AsTimingParams() == aTiming) {
    return;
  }
  mTiming->SetTimingParams(aTiming);
  if (mAnimation) {
    mAnimation->NotifyEffectTimingUpdated();
  }
  // For keyframe effects, NotifyEffectTimingUpdated above will eventually cause
  // KeyframeEffectReadOnly::NotifyAnimationTimingUpdated to be called so it can
  // update its registration with the target element as necessary.
}
// Computes the timing of an effect at the given local time, per the Web
// Animations specification.
//
// |aLocalTime| may be null, in which case only static parameters (duration,
// active duration, end time, fill, iteration counts) are filled in and
// mProgress remains null. |aPlaybackRate| is used to resolve which phase
// boundary an exact-boundary time falls into.
ComputedTiming
AnimationEffectReadOnly::GetComputedTimingAt(
    const Nullable<TimeDuration>& aLocalTime,
    const TimingParams& aTiming,
    double aPlaybackRate)
{
  const StickyTimeDuration zeroDuration;

  // Always return the same object to benefit from return-value optimization.
  ComputedTiming result;

  if (aTiming.mDuration) {
    MOZ_ASSERT(aTiming.mDuration.ref() >= zeroDuration,
               "Iteration duration should be positive");
    result.mDuration = aTiming.mDuration.ref();
  }

  MOZ_ASSERT(aTiming.mIterations >= 0.0 && !IsNaN(aTiming.mIterations),
             "mIterations should be nonnegative & finite, as ensured by "
             "ValidateIterations or CSSParser");
  result.mIterations = aTiming.mIterations;

  MOZ_ASSERT(aTiming.mIterationStart >= 0.0,
             "mIterationStart should be nonnegative, as ensured by "
             "ValidateIterationStart");
  result.mIterationStart = aTiming.mIterationStart;

  result.mActiveDuration = aTiming.ActiveDuration();
  result.mEndTime = aTiming.EndTime();
  // 'auto' fill resolves to 'none' in the computed timing.
  result.mFill = aTiming.mFill == dom::FillMode::Auto ?
                 dom::FillMode::None :
                 aTiming.mFill;

  // The default constructor for ComputedTiming sets all other members to
  // values consistent with an animation that has not been sampled.
  if (aLocalTime.IsNull()) {
    return result;
  }
  const TimeDuration& localTime = aLocalTime.Value();

  // Calculate the time within the active interval.
  // https://w3c.github.io/web-animations/#active-time
  StickyTimeDuration activeTime;
  StickyTimeDuration beforeActiveBoundary =
    std::min(StickyTimeDuration(aTiming.mDelay), result.mEndTime);
  StickyTimeDuration activeAfterBoundary =
    std::min(StickyTimeDuration(aTiming.mDelay + result.mActiveDuration),
             result.mEndTime);

  if (localTime > activeAfterBoundary ||
      (aPlaybackRate >= 0 && localTime == activeAfterBoundary)) {
    result.mPhase = ComputedTiming::AnimationPhase::After;
    if (!result.FillsForwards()) {
      // The animation isn't active or filling at this time.
      return result;
    }
    // Clamp to the active duration, accounting for a negative end delay
    // that may truncate the active interval.
    activeTime = std::max(std::min(result.mActiveDuration,
                                   result.mActiveDuration + aTiming.mEndDelay),
                          zeroDuration);
  } else if (localTime < beforeActiveBoundary ||
             (aPlaybackRate < 0 && localTime == beforeActiveBoundary)) {
    result.mPhase = ComputedTiming::AnimationPhase::Before;
    if (!result.FillsBackwards()) {
      // The animation isn't active or filling at this time.
      return result;
    }
    // activeTime is zero
  } else {
    MOZ_ASSERT(result.mActiveDuration != zeroDuration,
               "How can we be in the middle of a zero-duration interval?");
    result.mPhase = ComputedTiming::AnimationPhase::Active;
    activeTime = localTime - aTiming.mDelay;
  }

  // Convert active time to a multiple of iterations.
  // https://w3c.github.io/web-animations/#overall-progress
  double overallProgress;
  if (result.mDuration == zeroDuration) {
    overallProgress = result.mPhase == ComputedTiming::AnimationPhase::Before
                      ? 0.0
                      : result.mIterations;
  } else {
    overallProgress = activeTime / result.mDuration;
  }

  // Factor in iteration start offset.
  if (IsFinite(overallProgress)) {
    overallProgress += result.mIterationStart;
  }

  // Determine the 0-based index of the current iteration.
  // https://w3c.github.io/web-animations/#current-iteration
  result.mCurrentIteration =
    IsInfinite(result.mIterations) &&
      result.mPhase == ComputedTiming::AnimationPhase::After
    ? UINT64_MAX // In GetComputedTimingDictionary(),
                 // we will convert this into Infinity
    : static_cast<uint64_t>(overallProgress);

  // Convert the overall progress to a fraction of a single iteration--the
  // simply iteration progress.
  // https://w3c.github.io/web-animations/#simple-iteration-progress
  double progress = IsFinite(overallProgress)
                    ? fmod(overallProgress, 1.0)
                    : fmod(result.mIterationStart, 1.0);

  // When we finish exactly at the end of an iteration we need to report
  // the end of the final iteration and not the start of the next iteration.
  // We *don't* want to do this when we have a zero-iteration animation or
  // when the animation has been effectively made into a zero-duration animation
  // using a negative end-delay, however.
  if (result.mPhase == ComputedTiming::AnimationPhase::After &&
      progress == 0.0 &&
      result.mIterations != 0.0 &&
      (activeTime != zeroDuration || result.mDuration == zeroDuration)) {
    // The only way we can be in the after phase with a progress of zero and
    // a current iteration of zero, is if we have a zero iteration count or
    // were clipped using a negative end delay--both of which we should have
    // detected above.
    MOZ_ASSERT(result.mCurrentIteration != 0,
               "Should not have zero current iteration");
    progress = 1.0;
    if (result.mCurrentIteration != UINT64_MAX) {
      result.mCurrentIteration--;
    }
  }

  // Factor in the direction.
  bool thisIterationReverse = false;
  switch (aTiming.mDirection) {
    case PlaybackDirection::Normal:
      thisIterationReverse = false;
      break;
    case PlaybackDirection::Reverse:
      thisIterationReverse = true;
      break;
    case PlaybackDirection::Alternate:
      thisIterationReverse = (result.mCurrentIteration & 1) == 1;
      break;
    case PlaybackDirection::Alternate_reverse:
      thisIterationReverse = (result.mCurrentIteration & 1) == 0;
      break;
    default:
      // Fixed: this previously read MOZ_ASSERT(true, ...) which asserts a
      // condition that always holds, so the assert could never fire and an
      // unknown direction value passed silently in debug builds.
      MOZ_ASSERT(false, "Unknown PlaybackDirection type");
  }
  if (thisIterationReverse) {
    progress = 1.0 - progress;
  }

  // Calculate the 'before flag' which we use when applying step timing
  // functions.
  if ((result.mPhase == ComputedTiming::AnimationPhase::After &&
       thisIterationReverse) ||
      (result.mPhase == ComputedTiming::AnimationPhase::Before &&
       !thisIterationReverse)) {
    result.mBeforeFlag = ComputedTimingFunction::BeforeFlag::Set;
  }

  // Apply the easing.
  if (aTiming.mFunction) {
    progress = aTiming.mFunction->GetValue(progress, result.mBeforeFlag);
  }

  MOZ_ASSERT(IsFinite(progress), "Progress value should be finite");
  result.mProgress.SetValue(progress);
  return result;
}
// Shortcut that computes timing at the current local time, using the
// caller-supplied timing parameters when given and our specified timing
// otherwise.
ComputedTiming
AnimationEffectReadOnly::GetComputedTiming(const TimingParams* aTiming) const
{
  // Fall back to our own specified timing when no override is provided.
  const TimingParams& timingToUse = aTiming ? *aTiming : SpecifiedTiming();

  // Without an animation there is no playback rate to speak of; use 1.
  double rate = 1;
  if (mAnimation) {
    rate = mAnimation->PlaybackRate();
  }

  return GetComputedTimingAt(GetLocalTime(), timingToUse, rate);
}
// Helper functions for generating a ComputedTimingProperties dictionary
// Fills |aRetVal| from the given computed timing, local time, and specified
// timing, converting TimeDuration values to milliseconds for the WebIDL
// dictionary.
static void
GetComputedTimingDictionary(const ComputedTiming& aComputedTiming,
                            const Nullable<TimeDuration>& aLocalTime,
                            const TimingParams& aTiming,
                            ComputedTimingProperties& aRetVal)
{
  // AnimationEffectTimingProperties
  aRetVal.mDelay = aTiming.mDelay.ToMilliseconds();
  aRetVal.mEndDelay = aTiming.mEndDelay.ToMilliseconds();
  aRetVal.mFill = aComputedTiming.mFill;
  aRetVal.mIterations = aComputedTiming.mIterations;
  aRetVal.mIterationStart = aComputedTiming.mIterationStart;
  aRetVal.mDuration.SetAsUnrestrictedDouble() =
    aComputedTiming.mDuration.ToMilliseconds();
  aRetVal.mDirection = aTiming.mDirection;

  // ComputedTimingProperties
  aRetVal.mActiveDuration = aComputedTiming.mActiveDuration.ToMilliseconds();
  aRetVal.mEndTime = aComputedTiming.mEndTime.ToMilliseconds();
  aRetVal.mLocalTime = AnimationUtils::TimeDurationToDouble(aLocalTime);
  aRetVal.mProgress = aComputedTiming.mProgress;

  // currentIteration is only exposed when there is a progress value.
  if (!aRetVal.mProgress.IsNull()) {
    // Convert the returned currentIteration into Infinity if we set
    // (uint64_t) aComputedTiming.mCurrentIteration to UINT64_MAX
    double iteration = aComputedTiming.mCurrentIteration == UINT64_MAX
                       ? PositiveInfinity<double>()
                       : static_cast<double>(aComputedTiming.mCurrentIteration);
    aRetVal.mCurrentIteration.SetValue(iteration);
  }
}
// Exposes the current computed timing as a WebIDL dictionary
// (the getComputedTiming() method of the Web Animations API).
void
AnimationEffectReadOnly::GetComputedTimingAsDict(
    ComputedTimingProperties& aRetVal) const
{
  // With no associated animation there is no playback rate; use 1.
  double playbackRate = mAnimation ? mAnimation->PlaybackRate() : 1;
  const Nullable<TimeDuration> currentTime = GetLocalTime();
  GetComputedTimingDictionary(GetComputedTimingAt(currentTime,
                                                  SpecifiedTiming(),
                                                  playbackRate),
                              currentTime,
                              SpecifiedTiming(),
                              aRetVal);
}
// Destructor: explicitly break the cycle-collected reference held by the
// timing object.
AnimationEffectReadOnly::~AnimationEffectReadOnly()
{
  // mTiming is cycle collected, so we have to do null check first even though
  // mTiming shouldn't be null during the lifetime of KeyframeEffect.
  if (mTiming) {
    mTiming->Unlink();
  }
}
// Returns the effect's local time: the current time of the associated
// animation, or null when there is no animation.
Nullable<TimeDuration>
AnimationEffectReadOnly::GetLocalTime() const
{
  // Since the *animation* start time is currently always zero, the local
  // time is equal to the parent time.
  if (!mAnimation) {
    return Nullable<TimeDuration>();
  }
  return mAnimation->GetCurrentTime();
}
} // namespace dom
} // namespace mozilla

Просмотреть файл

@ -7,14 +7,26 @@
#ifndef mozilla_dom_AnimationEffectReadOnly_h
#define mozilla_dom_AnimationEffectReadOnly_h
#include "mozilla/ComputedTiming.h"
#include "mozilla/dom/AnimationEffectTimingReadOnly.h"
#include "mozilla/dom/BindingDeclarations.h"
#include "mozilla/dom/Nullable.h"
#include "mozilla/Maybe.h"
#include "mozilla/StickyTimeDuration.h"
#include "mozilla/TimeStamp.h"
#include "mozilla/TimingParams.h"
#include "nsCycleCollectionParticipant.h"
#include "nsWrapperCache.h"
namespace mozilla {
struct ElementPropertyTransition;
namespace dom {
class Animation;
class AnimationEffectTimingReadOnly;
class KeyframeEffectReadOnly;
struct ComputedTimingProperties;
class AnimationEffectReadOnly : public nsISupports,
@ -24,22 +36,61 @@ public:
NS_DECL_CYCLE_COLLECTING_ISUPPORTS
NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS(AnimationEffectReadOnly)
explicit AnimationEffectReadOnly(nsIDocument* aDocument)
: mDocument(aDocument)
AnimationEffectReadOnly(nsIDocument* aDocument,
AnimationEffectTimingReadOnly* aTiming);
virtual KeyframeEffectReadOnly* AsKeyframeEffect() { return nullptr; }
virtual ElementPropertyTransition* AsTransition() { return nullptr; }
virtual const ElementPropertyTransition* AsTransition() const
{
return nullptr;
}
nsISupports* GetParentObject() const { return mDocument; }
virtual already_AddRefed<AnimationEffectTimingReadOnly> Timing() const = 0;
bool IsInPlay() const;
bool IsCurrent() const;
bool IsInEffect() const;
virtual void GetComputedTimingAsDict(ComputedTimingProperties& aRetVal) const = 0;
already_AddRefed<AnimationEffectTimingReadOnly> Timing();
const TimingParams& SpecifiedTiming() const
{
return mTiming->AsTimingParams();
}
void SetSpecifiedTiming(const TimingParams& aTiming);
// This function takes as input the timing parameters of an animation and
// returns the computed timing at the specified local time.
//
// The local time may be null in which case only static parameters such as the
// active duration are calculated. All other members of the returned object
// are given a null/initial value.
//
// This function returns a null mProgress member of the return value
// if the animation should not be run
// (because it is not currently active and is not filling at this time).
static ComputedTiming
GetComputedTimingAt(const Nullable<TimeDuration>& aLocalTime,
const TimingParams& aTiming,
double aPlaybackRate);
// Shortcut that gets the computed timing using the current local time as
// calculated from the timeline time.
ComputedTiming GetComputedTiming(const TimingParams* aTiming = nullptr) const;
void GetComputedTimingAsDict(ComputedTimingProperties& aRetVal) const;
virtual void SetAnimation(Animation* aAnimation) = 0;
Animation* GetAnimation() const { return mAnimation; };
protected:
virtual ~AnimationEffectReadOnly() = default;
virtual ~AnimationEffectReadOnly();
Nullable<TimeDuration> GetLocalTime() const;
protected:
RefPtr<nsIDocument> mDocument;
RefPtr<AnimationEffectTimingReadOnly> mTiming;
RefPtr<Animation> mAnimation;
};
} // namespace dom

Просмотреть файл

@ -24,55 +24,11 @@
#include "nsIScriptError.h"
namespace mozilla {
// Helper functions for generating a ComputedTimingProperties dictionary
static void
GetComputedTimingDictionary(const ComputedTiming& aComputedTiming,
const Nullable<TimeDuration>& aLocalTime,
const TimingParams& aTiming,
dom::ComputedTimingProperties& aRetVal)
{
// AnimationEffectTimingProperties
aRetVal.mDelay = aTiming.mDelay.ToMilliseconds();
aRetVal.mEndDelay = aTiming.mEndDelay.ToMilliseconds();
aRetVal.mFill = aComputedTiming.mFill;
aRetVal.mIterations = aComputedTiming.mIterations;
aRetVal.mIterationStart = aComputedTiming.mIterationStart;
aRetVal.mDuration.SetAsUnrestrictedDouble() =
aComputedTiming.mDuration.ToMilliseconds();
aRetVal.mDirection = aTiming.mDirection;
// ComputedTimingProperties
aRetVal.mActiveDuration = aComputedTiming.mActiveDuration.ToMilliseconds();
aRetVal.mEndTime = aComputedTiming.mEndTime.ToMilliseconds();
aRetVal.mLocalTime = AnimationUtils::TimeDurationToDouble(aLocalTime);
aRetVal.mProgress = aComputedTiming.mProgress;
if (!aRetVal.mProgress.IsNull()) {
// Convert the returned currentIteration into Infinity if we set
// (uint64_t) aComputedTiming.mCurrentIteration to UINT64_MAX
double iteration = aComputedTiming.mCurrentIteration == UINT64_MAX
? PositiveInfinity<double>()
: static_cast<double>(aComputedTiming.mCurrentIteration);
aRetVal.mCurrentIteration.SetValue(iteration);
}
}
namespace dom {
NS_IMPL_CYCLE_COLLECTION_CLASS(KeyframeEffectReadOnly)
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(KeyframeEffectReadOnly,
AnimationEffectReadOnly)
if (tmp->mTiming) {
tmp->mTiming->Unlink();
}
NS_IMPL_CYCLE_COLLECTION_UNLINK(mTarget, mAnimation, mTiming)
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(KeyframeEffectReadOnly,
AnimationEffectReadOnly)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mTarget, mAnimation, mTiming)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
NS_IMPL_CYCLE_COLLECTION_INHERITED(KeyframeEffectReadOnly,
AnimationEffectReadOnly,
mTarget)
NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN_INHERITED(KeyframeEffectReadOnly,
AnimationEffectReadOnly)
@ -101,14 +57,12 @@ KeyframeEffectReadOnly::KeyframeEffectReadOnly(
const Maybe<OwningAnimationTarget>& aTarget,
AnimationEffectTimingReadOnly* aTiming,
const KeyframeEffectParams& aOptions)
: AnimationEffectReadOnly(aDocument)
: AnimationEffectReadOnly(aDocument, aTiming)
, mTarget(aTarget)
, mTiming(aTiming)
, mEffectOptions(aOptions)
, mInEffectOnLastAnimationTimingUpdate(false)
, mCumulativeChangeHint(nsChangeHint(0))
{
MOZ_ASSERT(aTiming);
}
JSObject*
@ -130,28 +84,6 @@ KeyframeEffectReadOnly::Composite() const
return CompositeOperation::Replace;
}
// Returns a strong (already-addrefed) reference to this effect's timing
// object for the WebIDL |timing| attribute.
already_AddRefed<AnimationEffectTimingReadOnly>
KeyframeEffectReadOnly::Timing() const
{
  RefPtr<AnimationEffectTimingReadOnly> temp(mTiming);
  return temp.forget();
}
// Replaces this effect's specified timing, notifying the associated
// animation (if any) when the timing actually changes.
void
KeyframeEffectReadOnly::SetSpecifiedTiming(const TimingParams& aTiming)
{
  // Skip the update (and the notification below) when nothing changed.
  if (mTiming->AsTimingParams() == aTiming) {
    return;
  }
  mTiming->SetTimingParams(aTiming);
  if (mAnimation) {
    mAnimation->NotifyEffectTimingUpdated();
  }
  // NotifyEffectTimingUpdated will eventually cause
  // NotifyAnimationTimingUpdated to be called on this object which will
  // update our registration with the target element.
}
void
KeyframeEffectReadOnly::NotifyAnimationTimingUpdated()
{
@ -205,249 +137,6 @@ KeyframeEffectReadOnly::NotifyAnimationTimingUpdated()
}
}
// Returns the effect's local time: the current time of the associated
// animation, or null when there is no animation.
Nullable<TimeDuration>
KeyframeEffectReadOnly::GetLocalTime() const
{
  // Since the *animation* start time is currently always zero, the local
  // time is equal to the parent time.
  Nullable<TimeDuration> result;
  if (mAnimation) {
    result = mAnimation->GetCurrentTime();
  }
  return result;
}
// Exposes the current computed timing as a WebIDL dictionary
// (the getComputedTiming() method of the Web Animations API).
void
KeyframeEffectReadOnly::GetComputedTimingAsDict(
    ComputedTimingProperties& aRetVal) const
{
  // With no associated animation there is no playback rate; use 1.
  double playbackRate = mAnimation ? mAnimation->PlaybackRate() : 1;
  const Nullable<TimeDuration> currentTime = GetLocalTime();
  GetComputedTimingDictionary(GetComputedTimingAt(currentTime,
                                                  SpecifiedTiming(),
                                                  playbackRate),
                              currentTime,
                              SpecifiedTiming(),
                              aRetVal);
}
// Computes the timing of an effect at the given local time, per the Web
// Animations specification. |aLocalTime| may be null, in which case only
// static parameters are filled in and mProgress remains null.
ComputedTiming
KeyframeEffectReadOnly::GetComputedTimingAt(
    const Nullable<TimeDuration>& aLocalTime,
    const TimingParams& aTiming,
    double aPlaybackRate)
{
  const StickyTimeDuration zeroDuration;

  // Always return the same object to benefit from return-value optimization.
  ComputedTiming result;

  if (aTiming.mDuration) {
    MOZ_ASSERT(aTiming.mDuration.ref() >= zeroDuration,
               "Iteration duration should be positive");
    result.mDuration = aTiming.mDuration.ref();
  }

  MOZ_ASSERT(aTiming.mIterations >= 0.0 && !IsNaN(aTiming.mIterations),
             "mIterations should be nonnegative & finite, as ensured by "
             "ValidateIterations or CSSParser");
  result.mIterations = aTiming.mIterations;

  MOZ_ASSERT(aTiming.mIterationStart >= 0.0,
             "mIterationStart should be nonnegative, as ensured by "
             "ValidateIterationStart");
  result.mIterationStart = aTiming.mIterationStart;

  result.mActiveDuration = aTiming.ActiveDuration();
  result.mEndTime = aTiming.EndTime();
  // 'auto' fill resolves to 'none' in the computed timing.
  result.mFill = aTiming.mFill == dom::FillMode::Auto ?
                 dom::FillMode::None :
                 aTiming.mFill;

  // The default constructor for ComputedTiming sets all other members to
  // values consistent with an animation that has not been sampled.
  if (aLocalTime.IsNull()) {
    return result;
  }
  const TimeDuration& localTime = aLocalTime.Value();

  // Calculate the time within the active interval.
  // https://w3c.github.io/web-animations/#active-time
  StickyTimeDuration activeTime;
  StickyTimeDuration beforeActiveBoundary =
    std::min(StickyTimeDuration(aTiming.mDelay), result.mEndTime);
  StickyTimeDuration activeAfterBoundary =
    std::min(StickyTimeDuration(aTiming.mDelay + result.mActiveDuration),
             result.mEndTime);

  if (localTime > activeAfterBoundary ||
      (aPlaybackRate >= 0 && localTime == activeAfterBoundary)) {
    result.mPhase = ComputedTiming::AnimationPhase::After;
    if (!result.FillsForwards()) {
      // The animation isn't active or filling at this time.
      return result;
    }
    // Clamp to the active duration, accounting for a negative end delay
    // that may truncate the active interval.
    activeTime = std::max(std::min(result.mActiveDuration,
                                   result.mActiveDuration + aTiming.mEndDelay),
                          zeroDuration);
  } else if (localTime < beforeActiveBoundary ||
             (aPlaybackRate < 0 && localTime == beforeActiveBoundary)) {
    result.mPhase = ComputedTiming::AnimationPhase::Before;
    if (!result.FillsBackwards()) {
      // The animation isn't active or filling at this time.
      return result;
    }
    // activeTime is zero
  } else {
    MOZ_ASSERT(result.mActiveDuration != zeroDuration,
               "How can we be in the middle of a zero-duration interval?");
    result.mPhase = ComputedTiming::AnimationPhase::Active;
    activeTime = localTime - aTiming.mDelay;
  }

  // Convert active time to a multiple of iterations.
  // https://w3c.github.io/web-animations/#overall-progress
  double overallProgress;
  if (result.mDuration == zeroDuration) {
    overallProgress = result.mPhase == ComputedTiming::AnimationPhase::Before
                      ? 0.0
                      : result.mIterations;
  } else {
    overallProgress = activeTime / result.mDuration;
  }

  // Factor in iteration start offset.
  if (IsFinite(overallProgress)) {
    overallProgress += result.mIterationStart;
  }

  // Determine the 0-based index of the current iteration.
  // https://w3c.github.io/web-animations/#current-iteration
  result.mCurrentIteration =
    IsInfinite(result.mIterations) &&
      result.mPhase == ComputedTiming::AnimationPhase::After
    ? UINT64_MAX // In GetComputedTimingDictionary(),
                 // we will convert this into Infinity
    : static_cast<uint64_t>(overallProgress);

  // Convert the overall progress to a fraction of a single iteration--the
  // simply iteration progress.
  // https://w3c.github.io/web-animations/#simple-iteration-progress
  double progress = IsFinite(overallProgress)
                    ? fmod(overallProgress, 1.0)
                    : fmod(result.mIterationStart, 1.0);

  // When we finish exactly at the end of an iteration we need to report
  // the end of the final iteration and not the start of the next iteration.
  // We *don't* want to do this when we have a zero-iteration animation or
  // when the animation has been effectively made into a zero-duration animation
  // using a negative end-delay, however.
  if (result.mPhase == ComputedTiming::AnimationPhase::After &&
      progress == 0.0 &&
      result.mIterations != 0.0 &&
      (activeTime != zeroDuration || result.mDuration == zeroDuration)) {
    // The only way we can be in the after phase with a progress of zero and
    // a current iteration of zero, is if we have a zero iteration count or
    // were clipped using a negative end delay--both of which we should have
    // detected above.
    MOZ_ASSERT(result.mCurrentIteration != 0,
               "Should not have zero current iteration");
    progress = 1.0;
    if (result.mCurrentIteration != UINT64_MAX) {
      result.mCurrentIteration--;
    }
  }

  // Factor in the direction.
  bool thisIterationReverse = false;
  switch (aTiming.mDirection) {
    case PlaybackDirection::Normal:
      thisIterationReverse = false;
      break;
    case PlaybackDirection::Reverse:
      thisIterationReverse = true;
      break;
    case PlaybackDirection::Alternate:
      thisIterationReverse = (result.mCurrentIteration & 1) == 1;
      break;
    case PlaybackDirection::Alternate_reverse:
      thisIterationReverse = (result.mCurrentIteration & 1) == 0;
      break;
    default:
      // Fixed: this previously read MOZ_ASSERT(true, ...) which asserts a
      // condition that always holds, so the assert could never fire and an
      // unknown direction value passed silently in debug builds.
      MOZ_ASSERT(false, "Unknown PlaybackDirection type");
  }
  if (thisIterationReverse) {
    progress = 1.0 - progress;
  }

  // Calculate the 'before flag' which we use when applying step timing
  // functions.
  if ((result.mPhase == ComputedTiming::AnimationPhase::After &&
       thisIterationReverse) ||
      (result.mPhase == ComputedTiming::AnimationPhase::Before &&
       !thisIterationReverse)) {
    result.mBeforeFlag = ComputedTimingFunction::BeforeFlag::Set;
  }

  // Apply the easing.
  if (aTiming.mFunction) {
    progress = aTiming.mFunction->GetValue(progress, result.mBeforeFlag);
  }

  MOZ_ASSERT(IsFinite(progress), "Progress value should be finite");
  result.mProgress.SetValue(progress);
  return result;
}
// Shortcut that computes timing at the current local time, using the
// caller-supplied timing parameters when given and our specified timing
// otherwise.
ComputedTiming
KeyframeEffectReadOnly::GetComputedTiming(const TimingParams* aTiming) const
{
  // With no associated animation there is no playback rate; use 1.
  double playbackRate = mAnimation ? mAnimation->PlaybackRate() : 1;
  return GetComputedTimingAt(GetLocalTime(),
                             aTiming ? *aTiming : SpecifiedTiming(),
                             playbackRate);
}
// https://w3c.github.io/web-animations/#in-play
bool
KeyframeEffectReadOnly::IsInPlay() const
{
  // An effect with no animation, or a finished animation, is not in play.
  if (!mAnimation || mAnimation->PlayState() == AnimationPlayState::Finished) {
    return false;
  }
  // Otherwise, in play exactly when in the active phase.
  return GetComputedTiming().mPhase == ComputedTiming::AnimationPhase::Active;
}
// https://w3c.github.io/web-animations/#current
bool
KeyframeEffectReadOnly::IsCurrent() const
{
  // An effect with no animation, or a finished animation, is not current.
  if (!mAnimation || mAnimation->PlayState() == AnimationPlayState::Finished) {
    return false;
  }
  // Current exactly when in the before or active phase.
  ComputedTiming computedTiming = GetComputedTiming();
  return computedTiming.mPhase == ComputedTiming::AnimationPhase::Before ||
         computedTiming.mPhase == ComputedTiming::AnimationPhase::Active;
}
// https://w3c.github.io/web-animations/#in-effect
bool
KeyframeEffectReadOnly::IsInEffect() const
{
  // In effect exactly when the computed timing yields a progress value.
  ComputedTiming computedTiming = GetComputedTiming();
  return !computedTiming.mProgress.IsNull();
}
// Associates this effect with |aAnimation| (which may be null) and refreshes
// timing-dependent state for the new association.
void
KeyframeEffectReadOnly::SetAnimation(Animation* aAnimation)
{
  mAnimation = aAnimation;
  NotifyAnimationTimingUpdated();
}
static bool
KeyframesEqualIgnoringComputedOffsets(const nsTArray<Keyframe>& aLhs,
const nsTArray<Keyframe>& aRhs)
@ -772,10 +461,6 @@ KeyframeEffectReadOnly::ResetIsRunningOnCompositor()
}
}
KeyframeEffectReadOnly::~KeyframeEffectReadOnly()
{
}
static const KeyframeEffectOptions&
KeyframeEffectOptionsFromUnion(
const UnrestrictedDoubleOrKeyframeEffectOptions& aOptions)
@ -1535,6 +1220,32 @@ KeyframeEffectReadOnly::CalculateCumulativeChangeHint(
}
}
// Associates this effect with |aAnimation| (which may be null), requesting
// layer restyles on both sides of the switch and marking the target's
// effect-set cascade for update.
void
KeyframeEffectReadOnly::SetAnimation(Animation* aAnimation)
{
  if (mAnimation == aAnimation) {
    return;
  }
  // Restyle for the old animation.
  RequestRestyle(EffectCompositor::RestyleType::Layer);
  mAnimation = aAnimation;
  // Restyle for the new animation.
  RequestRestyle(EffectCompositor::RestyleType::Layer);
  if (mTarget) {
    // The set of effects contributing to the target's cascade has changed.
    EffectSet* effectSet = EffectSet::GetEffectSet(mTarget->mElement,
                                                   mTarget->mPseudoType);
    if (effectSet) {
      effectSet->MarkCascadeNeedsUpdate();
    }
  }
  NotifyAnimationTimingUpdated();
}
bool
KeyframeEffectReadOnly::CanIgnoreIfNotVisible() const
{
@ -1683,14 +1394,5 @@ KeyframeEffect::SetTarget(const Nullable<ElementOrCSSPseudoElement>& aTarget)
}
}
// Destructor: explicitly break the cycle-collected reference held by the
// timing object.
KeyframeEffect::~KeyframeEffect()
{
  // mTiming is cycle collected, so we have to do null check first even though
  // mTiming shouldn't be null during the lifetime of KeyframeEffect.
  if (mTiming) {
    mTiming->Unlink();
  }
}
} // namespace dom
} // namespace mozilla

Просмотреть файл

@ -16,20 +16,13 @@
#include "mozilla/AnimationPerformanceWarning.h"
#include "mozilla/AnimationTarget.h"
#include "mozilla/Attributes.h"
#include "mozilla/ComputedTiming.h"
#include "mozilla/ComputedTimingFunction.h"
#include "mozilla/EffectCompositor.h"
#include "mozilla/KeyframeEffectParams.h"
#include "mozilla/LayerAnimationInfo.h" // LayerAnimations::kRecords
#include "mozilla/Maybe.h"
#include "mozilla/StickyTimeDuration.h"
#include "mozilla/StyleAnimationValue.h"
#include "mozilla/TimeStamp.h"
#include "mozilla/TimingParams.h"
#include "mozilla/dom/AnimationEffectReadOnly.h"
#include "mozilla/dom/AnimationEffectTimingReadOnly.h"
#include "mozilla/dom/Element.h"
#include "mozilla/dom/Nullable.h"
struct JSContext;
class nsCSSPropertyIDSet;
@ -205,11 +198,7 @@ public:
virtual JSObject* WrapObject(JSContext* aCx,
JS::Handle<JSObject*> aGivenProto) override;
virtual ElementPropertyTransition* AsTransition() { return nullptr; }
virtual const ElementPropertyTransition* AsTransition() const
{
return nullptr;
}
KeyframeEffectReadOnly* AsKeyframeEffect() override { return this; }
// KeyframeEffectReadOnly interface
static already_AddRefed<KeyframeEffectReadOnly>
@ -241,46 +230,9 @@ public:
mEffectOptions.GetSpacingAsString(aRetVal);
}
already_AddRefed<AnimationEffectTimingReadOnly> Timing() const override;
const TimingParams& SpecifiedTiming() const
{
return mTiming->AsTimingParams();
}
void SetSpecifiedTiming(const TimingParams& aTiming);
void NotifyAnimationTimingUpdated();
Nullable<TimeDuration> GetLocalTime() const;
// This function takes as input the timing parameters of an animation and
// returns the computed timing at the specified local time.
//
// The local time may be null in which case only static parameters such as the
// active duration are calculated. All other members of the returned object
// are given a null/initial value.
//
// This function returns a null mProgress member of the return value
// if the animation should not be run
// (because it is not currently active and is not filling at this time).
static ComputedTiming
GetComputedTimingAt(const Nullable<TimeDuration>& aLocalTime,
const TimingParams& aTiming,
double aPlaybackRate);
// Shortcut for that gets the computed timing using the current local time as
// calculated from the timeline time.
ComputedTiming
GetComputedTiming(const TimingParams* aTiming = nullptr) const;
void
GetComputedTimingAsDict(ComputedTimingProperties& aRetVal) const override;
bool IsInPlay() const;
bool IsCurrent() const;
bool IsInEffect() const;
void SetAnimation(Animation* aAnimation);
Animation* GetAnimation() const { return mAnimation; }
void SetAnimation(Animation* aAnimation) override;
void SetKeyframes(JSContext* aContext, JS::Handle<JSObject*> aKeyframes,
ErrorResult& aRv);
@ -288,13 +240,16 @@ public:
nsStyleContext* aStyleContext);
const AnimationProperty*
GetAnimationOfProperty(nsCSSPropertyID aProperty) const;
bool HasAnimationOfProperty(nsCSSPropertyID aProperty) const {
bool HasAnimationOfProperty(nsCSSPropertyID aProperty) const
{
return GetAnimationOfProperty(aProperty) != nullptr;
}
const InfallibleTArray<AnimationProperty>& Properties() const {
const InfallibleTArray<AnimationProperty>& Properties() const
{
return mProperties;
}
InfallibleTArray<AnimationProperty>& Properties() {
InfallibleTArray<AnimationProperty>& Properties()
{
return mProperties;
}
@ -354,7 +309,7 @@ protected:
AnimationEffectTimingReadOnly* aTiming,
const KeyframeEffectParams& aOptions);
virtual ~KeyframeEffectReadOnly();
~KeyframeEffectReadOnly() override = default;
template<class KeyframeEffectType, class OptionsType>
static already_AddRefed<KeyframeEffectType>
@ -397,9 +352,7 @@ protected:
GetTargetStyleContext();
Maybe<OwningAnimationTarget> mTarget;
RefPtr<Animation> mAnimation;
RefPtr<AnimationEffectTimingReadOnly> mTiming;
KeyframeEffectParams mEffectOptions;
// The specified keyframes.
@ -472,9 +425,6 @@ public:
// that to update the properties rather than calling
// GetStyleContextForElement.
void SetTarget(const Nullable<ElementOrCSSPseudoElement>& aTarget);
protected:
~KeyframeEffect() override;
};
} // namespace dom

Просмотреть файл

@ -1794,6 +1794,108 @@ addAsyncAnimTest("set_redundant_animation_target",
yield await_frame();
});
// Clearing an animation's effect should surface as a "removed" mutation
// record for the previous target.
addAsyncAnimTest("set_null_animation_effect",
                 { observe: div, subtree: true }, function*() {
  var anim = div.animate({ opacity: [ 0, 1 ] },
                         { duration: 100 * MS_PER_SEC });
  yield await_frame();
  assert_records([{ added: [anim], changed: [], removed: [] }],
                 "records after animation is added");

  anim.effect = null;
  yield await_frame();
  assert_records([{ added: [], changed: [], removed: [anim] }],
                 "records after animation is removed");

  anim.cancel();
  yield await_frame();
});

// Assigning an effect to an animation that previously had none should
// surface as an "added" mutation record on the new target.
addAsyncAnimTest("set_effect_on_null_effect_animation",
                 { observe: div, subtree: true }, function*() {
  var anim = new Animation();
  anim.play();
  anim.effect = new KeyframeEffect(div, { opacity: [ 0, 1 ] },
                                   100 * MS_PER_SEC);
  yield await_frame();
  assert_records([{ added: [anim], changed: [], removed: [] }],
                 "records after animation is added");

  anim.cancel();
  yield await_frame();
});

// Swapping in a new effect that targets the same element should surface as
// a "changed" mutation record rather than remove+add.
addAsyncAnimTest("replace_effect_targeting_on_the_same_element",
                 { observe: div, subtree: true }, function*() {
  var anim = div.animate({ marginLeft: [ "0px", "100px" ] },
                         100 * MS_PER_SEC);
  yield await_frame();
  assert_records([{ added: [anim], changed: [], removed: [] }],
                 "records after animation is added");

  anim.effect = new KeyframeEffect(div, { opacity: [ 0, 1 ] },
                                   100 * MS_PER_SEC);
  yield await_frame();
  assert_records([{ added: [], changed: [anim], removed: [] }],
                 "records after replace effects");

  anim.cancel();
  yield await_frame();
});

// Replacing the effect with one whose duration has already elapsed (current
// time 60s vs. 50s duration) should surface as a "removed" record since the
// animation is no longer in effect.
addAsyncAnimTest("replace_effect_targeting_on_the_same_element_not_in_effect",
                 { observe: div, subtree: true }, function*() {
  var anim = div.animate({ marginLeft: [ "0px", "100px" ] },
                         100 * MS_PER_SEC);
  yield await_frame();
  assert_records([{ added: [anim], changed: [], removed: [] }],
                 "records after animation is added");

  anim.currentTime = 60 * MS_PER_SEC;
  yield await_frame();
  assert_records([{ added: [], changed: [anim], removed: [] }],
                 "records after animation is changed");

  anim.effect = new KeyframeEffect(div, { opacity: [ 0, 1 ] },
                                   50 * MS_PER_SEC);
  yield await_frame();
  assert_records([{ added: [], changed: [], removed: [anim] }],
                 "records after replacing effects");

  anim.cancel();
  yield await_frame();
});

// Moving an effect from one animation to another should produce batched,
// tree-ordered records for both the old and the new target elements.
addAsyncAnimTest("set_effect_with_previous_animation",
                 { observe: div, subtree: true }, function*() {
  var child = document.createElement("div");
  div.appendChild(child);

  var anim1 = div.animate({ marginLeft: [ "0px", "50px" ] },
                          100 * MS_PER_SEC);
  var anim2 = child.animate({ marginLeft: [ "0px", "100px" ] },
                            50 * MS_PER_SEC);
  yield await_frame();
  assert_records([{ added: [anim1], changed: [], removed: [] },
                  { added: [anim2], changed: [], removed: [] }],
                 "records after animation is added");

  // After setting a new effect, we remove the current animation, anim1, because
  // it is no longer attached to |div|, and then remove the previous animation,
  // anim2. Finally, add back the anim1 which is in effect on |child| now.
  // In addition, we sort them by tree order and they are batched.
  anim1.effect = anim2.effect;
  yield await_frame();
  assert_records([{ added: [], changed: [], removed: [anim1] }, // div
                  { added: [anim1], changed: [], removed: [anim2] }], // child
                 "records after animation effects are changed");

  anim1.cancel();
  anim2.cancel();
  child.remove();
  yield await_frame();
});
// Run the tests.
SimpleTest.requestLongerTimeout(2);
SimpleTest.waitForExplicitFinish();

Просмотреть файл

@ -515,6 +515,31 @@ promise_test(function(t) {
}, '100% opacity animation set up by converting an existing animation with ' +
'cannot be run on the compositor, is running on the compositor');
// A color animation cannot run on the compositor, but after swapping in an
// opacity effect the animation should (when OMTA is enabled) report that it
// now runs on the compositor.
promise_test(function(t) {
var div = addDiv(t);
var animation = div.animate({ color: ['red', 'black'] }, 100 * MS_PER_SEC);
// Effect with ~100% opacity for almost its whole duration; opacity is a
// compositor-animatable property.
var effect = new KeyframeEffect(div,
[{ opacity: 1, offset: 0 },
{ opacity: 1, offset: 0.99 },
{ opacity: 0, offset: 1 }],
100 * MS_PER_SEC);
return animation.ready.then(function() {
assert_equals(animation.isRunningOnCompositor, false,
'Color animation reports that it is not running on the ' +
'compositor');
animation.effect = effect;
// Wait a frame so the compositor state can be updated.
return waitForFrame();
}).then(function() {
assert_equals(animation.isRunningOnCompositor, omtaEnabled,
'100% opacity animation set up by changing effects reports ' +
'that it is running on the compositor');
});
}, '100% opacity animation set up by changing the effects on an existing ' +
'animation which cannot be run on the compositor, is running on the ' +
'compositor');
promise_test(function(t) {
var div = addDiv(t, { style: "opacity: 1 ! important" });

Просмотреть файл

@ -0,0 +1,91 @@
<!doctype html>
<meta charset=utf-8>
<script src='../testcommon.js'></script>
<body>
<script>
'use strict';
// Removing a transition's effect (effect = null) should finish the
// transition while the underlying style keeps its end value.
promise_test(function(t) {
var div = addDiv(t);
div.style.left = '0px';
div.style.transition = 'left 100s';
// Flush so the subsequent style change actually triggers a transition.
flushComputedStyle(div);
div.style.left = '100px';
var transition = div.getAnimations()[0];
return transition.ready.then(function() {
transition.currentTime = 50 * MS_PER_SEC;
transition.effect = null;
assert_equals(transition.transitionProperty, 'left');
assert_equals(transition.playState, 'finished');
assert_equals(window.getComputedStyle(div).left, '100px');
});
}, 'Test for removing a transition effect');
// Replacing a transition's effect with an unrelated keyframe effect: the
// transition keeps running (and its transitionProperty), while the new
// effect's property (margin-left) is what actually animates.
promise_test(function(t) {
var div = addDiv(t);
div.style.left = '0px';
div.style.transition = 'left 100s';
flushComputedStyle(div);
div.style.left = '100px';
var transition = div.getAnimations()[0];
return transition.ready.then(function() {
transition.currentTime = 50 * MS_PER_SEC;
transition.effect = new KeyframeEffect(div,
{ marginLeft: [ '0px' , '100px'] },
100 * MS_PER_SEC);
assert_equals(transition.transitionProperty, 'left');
assert_equals(transition.playState, 'running');
// 'left' is no longer animated, so it holds its specified end value...
assert_equals(window.getComputedStyle(div).left, '100px');
// ...while margin-left reflects the new effect at 50s of 100s.
assert_equals(window.getComputedStyle(div).marginLeft, '50px');
});
}, 'Test for replacing the transition effect by a new keyframe effect');
// A replacement effect shorter (20s) than the transition's current time
// (50s) leaves the transition past its end, i.e. finished.
promise_test(function(t) {
var div = addDiv(t);
div.style.left = '0px';
div.style.width = '0px';
div.style.transition = 'left 100s';
flushComputedStyle(div);
div.style.left = '100px';
var transition = div.getAnimations()[0];
return transition.ready.then(function() {
transition.currentTime = 50 * MS_PER_SEC;
transition.effect = new KeyframeEffect(div,
{ marginLeft: [ '0px' , '100px'] },
20 * MS_PER_SEC);
assert_equals(transition.playState, 'finished');
});
}, 'Test for setting a new keyframe effect with a shorter duration');
// Setting a new effect on a still-pending transition must not change its
// play state: it stays pending until ready, then runs.
promise_test(function(t) {
var div = addDiv(t);
div.style.left = '0px';
div.style.width = '0px';
div.style.transition = 'left 100s';
flushComputedStyle(div);
div.style.left = '100px';
var transition = div.getAnimations()[0];
// No ready-wait yet: the transition has not started playing.
assert_equals(transition.playState, 'pending');
transition.effect = new KeyframeEffect(div,
{ marginLeft: [ '0px' , '100px'] },
100 * MS_PER_SEC);
assert_equals(transition.transitionProperty, 'left');
assert_equals(transition.playState, 'pending');
return transition.ready.then(function() {
assert_equals(transition.playState, 'running');
});
}, 'Test for setting a new keyframe effect to a pending transition');
done();
</script>
</body>

Просмотреть файл

@ -0,0 +1,14 @@
<!doctype html>
<meta charset=utf-8>
<script src='/resources/testharness.js'></script>
<script src='/resources/testharnessreport.js'></script>
<div id='log'></div>
<script>
'use strict';
// explicit_done: the real tests run in the child window opened below and
// call done() there themselves.
setup({explicit_done: true});
// Enable the Web Animations core API pref before opening the test file that
// depends on it.
SpecialPowers.pushPrefEnv(
{ 'set': [['dom.animations-api.core.enabled', true]]},
function() {
window.open('file_setting-effect.html');
});
</script>

Просмотреть файл

@ -34,6 +34,7 @@ support-files =
css-transitions/file_element-get-animations.html
css-transitions/file_keyframeeffect-getkeyframes.html
css-transitions/file_pseudoElement-get-animations.html
css-transitions/file_setting-effect.html
document-timeline/file_document-timeline.html
mozilla/file_cubic_bezier_limits.html
mozilla/file_deferred_start.html
@ -46,6 +47,7 @@ support-files =
mozilla/file_underlying-discrete-value.html
style/file_animation-seeking-with-current-time.html
style/file_animation-seeking-with-start-time.html
style/file_animation-setting-effect.html
testcommon.js
[css-animations/test_animations-dynamic-changes.html]
@ -81,6 +83,7 @@ skip-if = buildapp == 'mulet'
skip-if = buildapp == 'mulet'
[css-transitions/test_keyframeeffect-getkeyframes.html]
[css-transitions/test_pseudoElement-get-animations.html]
[css-transitions/test_setting-effect.html]
[document-timeline/test_document-timeline.html]
[document-timeline/test_request_animation_frame.html]
skip-if = buildapp == 'mulet'
@ -97,3 +100,4 @@ skip-if = (toolkit == 'gonk' && debug)
[mozilla/test_underlying-discrete-value.html]
[style/test_animation-seeking-with-current-time.html]
[style/test_animation-seeking-with-start-time.html]
[style/test_animation-setting-effect.html]

Просмотреть файл

@ -0,0 +1,125 @@
<!doctype html>
<html>
<head>
<meta charset=utf-8>
<title>Tests for setting effects by using Animation.effect</title>
<script src='../testcommon.js'></script>
</head>
<body>
<script type='text/javascript'>
'use strict';
// An Animation constructed without an effect becomes functional once an
// effect is assigned: seeking to 50% of a 0px->100px margin-left effect
// yields 50px.
test(function(t) {
var target = addDiv(t);
var anim = new Animation();
anim.effect = new KeyframeEffectReadOnly(target,
{ marginLeft: [ '0px', '100px' ] },
100 * MS_PER_SEC);
anim.currentTime = 50 * MS_PER_SEC;
assert_equals(getComputedStyle(target).marginLeft, '50px');
}, 'After setting target effect on an animation with null effect, the ' +
'animation still works');
// Clearing the effect (effect = null) removes the animated value so the
// specified style (margin-left: 10px) shows through again.
test(function(t) {
var target = addDiv(t);
target.style.marginLeft = '10px';
var anim = target.animate({ marginLeft: [ '0px', '100px' ] },
100 * MS_PER_SEC);
anim.currentTime = 50 * MS_PER_SEC;
assert_equals(getComputedStyle(target).marginLeft, '50px');
anim.effect = null;
assert_equals(getComputedStyle(target).marginLeft, '10px');
}, 'After setting null target effect, the computed style of the target ' +
'element becomes the initial value');
// Moving animA's effect onto animB makes the target sample at animB's
// current time (20s -> 20px) instead of animA's (50s -> 50px).
test(function(t) {
var target = addDiv(t);
var animA = target.animate({ marginLeft: [ '0px', '100px' ] },
100 * MS_PER_SEC);
var animB = new Animation();
animA.currentTime = 50 * MS_PER_SEC;
animB.currentTime = 20 * MS_PER_SEC;
assert_equals(getComputedStyle(target).marginLeft, '50px',
'original computed style of the target element');
animB.effect = animA.effect;
assert_equals(getComputedStyle(target).marginLeft, '20px',
'new computed style of the target element');
}, 'After setting the target effect from an existing animation, the computed ' +
'style of the target effect should reflect the time of the updated ' +
'animation.');
// Both animations target the same element. After animB takes over animA's
// margin-left effect, animA no longer contributes (margin-left samples at
// animB's 10s -> 10px) and animB's original margin-top effect is dropped,
// exposing the specified margin-top (-10px).
test(function(t) {
var target = addDiv(t);
target.style.marginTop = '-10px';
var animA = target.animate({ marginLeft: [ '0px', '100px' ] },
100 * MS_PER_SEC);
var animB = target.animate({ marginTop: [ '0px', '100px' ] },
50 * MS_PER_SEC);
animA.currentTime = 50 * MS_PER_SEC;
animB.currentTime = 10 * MS_PER_SEC;
assert_equals(getComputedStyle(target).marginLeft, '50px',
'original margin-left of the target element');
assert_equals(getComputedStyle(target).marginTop, '20px',
'original margin-top of the target element');
animB.effect = animA.effect;
assert_equals(getComputedStyle(target).marginLeft, '10px',
'new margin-left of the target element');
assert_equals(getComputedStyle(target).marginTop, '-10px',
'new margin-top of the target element');
}, 'After setting target effect with an animation to another animation which ' +
'also has an target effect and both animation effects target to the same ' +
'element, the computed style of this element should reflect the time and ' +
'effect of the animation that was set');
// The animations target different elements. After the transfer, targetA
// animates at animB's time (10s -> 10px) and targetB is no longer animated,
// so its specified margin-left (-10px) shows through.
test(function(t) {
var targetA = addDiv(t);
var targetB = addDiv(t);
targetB.style.marginLeft = '-10px';
var animA = targetA.animate({ marginLeft: [ '0px', '100px' ] },
100 * MS_PER_SEC);
var animB = targetB.animate({ marginLeft: [ '0px', '100px' ] },
50 * MS_PER_SEC);
animA.currentTime = 50 * MS_PER_SEC;
animB.currentTime = 10 * MS_PER_SEC;
assert_equals(getComputedStyle(targetA).marginLeft, '50px',
'original margin-left of the first element');
assert_equals(getComputedStyle(targetB).marginLeft, '20px',
'original margin-left of the second element');
animB.effect = animA.effect;
assert_equals(getComputedStyle(targetA).marginLeft, '10px',
'new margin-left of the first element');
assert_equals(getComputedStyle(targetB).marginLeft, '-10px',
'new margin-left of the second element');
}, 'After setting target effect with an animation to another animation which ' +
'also has an target effect and these animation effects target to ' +
'different elements, the computed styles of the two elements should ' +
'reflect the time and effect of the animation that was set');
// Swapping effects between two animations keeps each animation's current
// time: margin-left now samples animB's time (30s of 50s -> 60px) and
// margin-top samples animA's time (20s of 100s -> 10px).
test(function(t) {
var target = addDiv(t);
var animA = target.animate({ marginLeft: [ '0px', '100px' ] },
50 * MS_PER_SEC);
var animB = target.animate({ marginTop: [ '0px', '50px' ] },
100 * MS_PER_SEC);
animA.currentTime = 20 * MS_PER_SEC;
animB.currentTime = 30 * MS_PER_SEC;
assert_equals(getComputedStyle(target).marginLeft, '40px');
assert_equals(getComputedStyle(target).marginTop, '15px');
var effectA = animA.effect;
animA.effect = animB.effect;
animB.effect = effectA;
assert_equals(getComputedStyle(target).marginLeft, '60px');
assert_equals(getComputedStyle(target).marginTop, '10px');
}, 'After swapping effects of two playing animations, both animations are ' +
'still running with the same current time');
done();
</script>
</body>
</html>

Просмотреть файл

@ -0,0 +1,15 @@
<!doctype html>
<meta charset=utf-8>
<script src='/resources/testharness.js'></script>
<script src='/resources/testharnessreport.js'></script>
<div id='log'></div>
<script>
'use strict';
// explicit_done: the real tests run in the child window opened below and
// call done() there themselves.
setup({explicit_done: true});
// Enable the Web Animations core API pref before opening the test file that
// depends on it.
SpecialPowers.pushPrefEnv(
{ 'set': [['dom.animations-api.core.enabled', true]]},
function() {
window.open('file_animation-setting-effect.html');
});
</script>
</html>

Просмотреть файл

@ -119,7 +119,6 @@ DOM4_MSG_DEF(BtAuthRejectedError, "Authentication rejected", NS_ERROR_DOM_BLUET
/* Web Animations errors */
DOM4_MSG_DEF(NotSupportedError, "Animation to or from an underlying value is not yet supported.", NS_ERROR_DOM_ANIM_MISSING_PROPS_ERR)
DOM4_MSG_DEF(NotSupportedError, "Animation with no effect is not yet supported.", NS_ERROR_DOM_ANIM_NO_EFFECT_ERR)
/* common global codes (from nsError.h) */

Просмотреть файл

@ -390,12 +390,18 @@ void
nsAnimationReceiver::RecordAnimationMutation(Animation* aAnimation,
AnimationMutation aMutationType)
{
mozilla::dom::KeyframeEffectReadOnly* effect = aAnimation->GetEffect();
mozilla::dom::AnimationEffectReadOnly* effect = aAnimation->GetEffect();
if (!effect) {
return;
}
Maybe<NonOwningAnimationTarget> animationTarget = effect->GetTarget();
mozilla::dom::KeyframeEffectReadOnly* keyframeEffect =
effect->AsKeyframeEffect();
if (!keyframeEffect) {
return;
}
Maybe<NonOwningAnimationTarget> animationTarget = keyframeEffect->GetTarget();
if (!animationTarget) {
return;
}

Просмотреть файл

@ -3637,13 +3637,18 @@ nsDOMWindowUtils::GetOMTAStyle(nsIDOMElement* aElement,
FrameLayerBuilder::GetDedicatedLayer(frame,
nsDisplayItem::TYPE_OPACITY);
if (layer) {
float value;
ShadowLayerForwarder* forwarder = layer->Manager()->AsShadowForwarder();
if (forwarder && forwarder->HasShadowManager()) {
forwarder->GetShadowManager()->SendGetOpacity(
layer->AsShadowableLayer()->GetShadow(), &value);
cssValue = new nsROCSSPrimitiveValue;
cssValue->SetNumber(value);
float value;
bool hadAnimatedOpacity;
forwarder->GetShadowManager()->SendGetAnimationOpacity(
layer->AsShadowableLayer()->GetShadow(),
&value, &hadAnimatedOpacity);
if (hadAnimatedOpacity) {
cssValue = new nsROCSSPrimitiveValue;
cssValue->SetNumber(value);
}
}
}
} else if (aProperty.EqualsLiteral("transform")) {

Просмотреть файл

@ -231,8 +231,11 @@ nsNodeUtils::ContentRemoved(nsINode* aContainer,
Maybe<NonOwningAnimationTarget>
nsNodeUtils::GetTargetForAnimation(const Animation* aAnimation)
{
KeyframeEffectReadOnly* effect = aAnimation->GetEffect();
return effect ? effect->GetTarget() : Nothing();
AnimationEffectReadOnly* effect = aAnimation->GetEffect();
if (!effect || !effect->AsKeyframeEffect()) {
return Nothing();
}
return effect->AsKeyframeEffect()->GetTarget();
}
void

Просмотреть файл

@ -102,6 +102,14 @@ CaptureStreamTestHelper.prototype = {
return px.some((ch, i) => Math.abs(ch - refColor.data[i]) > threshold);
},
/*
* Behaves like isPixelNot but ignores the alpha channel.
*/
isOpaquePixelNot: function(px, refColor, threshold) {
px[3] = refColor.data[3];
return h.isPixelNot(px, refColor, threshold);
},
/*
* Returns a promise that resolves when the provided function |test|
* returns true.

Просмотреть файл

@ -686,8 +686,10 @@ HTMLCanvasElement::CaptureStream(const Optional<double>& aFrameRate,
return nullptr;
}
RefPtr<MediaStreamTrack> track =
stream->CreateDOMTrack(videoTrackId, MediaSegment::VIDEO,
new BasicUnstoppableTrackSource(principal));
stream->AddTrackInternal(track);
rv = RegisterFrameCaptureListener(stream->FrameCaptureListener());
if (NS_FAILED(rv)) {

Просмотреть файл

@ -798,8 +798,7 @@ HTMLMediaElement::SetVisible(bool aVisible)
return;
}
mDecoder->NotifyOwnerActivityChanged(aVisible);
mDecoder->SetForcedHidden(!aVisible);
}
already_AddRefed<DOMMediaStream>
@ -939,7 +938,6 @@ void HTMLMediaElement::AbortExistingLoads()
// We need to remove StreamSizeListener before VideoTracks get emptied.
if (mMediaStreamSizeListener) {
mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
mSelectedVideoStreamTrack = nullptr;
mMediaStreamSizeListener->Forget();
mMediaStreamSizeListener = nullptr;
}
@ -1241,6 +1239,7 @@ void HTMLMediaElement::NotifyLoadError()
void HTMLMediaElement::NotifyMediaTrackEnabled(MediaTrack* aTrack)
{
MOZ_ASSERT(aTrack);
if (!aTrack) {
return;
}
@ -1248,19 +1247,137 @@ void HTMLMediaElement::NotifyMediaTrackEnabled(MediaTrack* aTrack)
nsString id;
aTrack->GetId(id);
LOG(LogLevel::Debug, ("MediaElement %p MediaStreamTrack enabled with id %s",
this, NS_ConvertUTF16toUTF8(id).get()));
LOG(LogLevel::Debug, ("MediaElement %p %sTrack with id %s enabled",
this, aTrack->AsAudioTrack() ? "Audio" : "Video",
NS_ConvertUTF16toUTF8(id).get()));
#endif
// TODO: We are dealing with single audio track and video track for now.
if (AudioTrack* track = aTrack->AsAudioTrack()) {
if (!track->Enabled()) {
SetMutedInternal(mMuted | MUTED_BY_AUDIO_TRACK);
} else {
SetMutedInternal(mMuted & ~MUTED_BY_AUDIO_TRACK);
MOZ_ASSERT((aTrack->AsAudioTrack() && aTrack->AsAudioTrack()->Enabled()) ||
(aTrack->AsVideoTrack() && aTrack->AsVideoTrack()->Selected()));
if (aTrack->AsAudioTrack()) {
SetMutedInternal(mMuted & ~MUTED_BY_AUDIO_TRACK);
} else if (aTrack->AsVideoTrack()) {
if (!IsVideo()) {
MOZ_ASSERT(false);
return;
}
} else if (VideoTrack* track = aTrack->AsVideoTrack()) {
mDisableVideo = !track->Selected();
mDisableVideo = false;
} else {
MOZ_ASSERT(false, "Unknown track type");
}
if (mSrcStream) {
if (aTrack->AsVideoTrack()) {
MOZ_ASSERT(!mSelectedVideoStreamTrack);
MOZ_ASSERT(!mMediaStreamSizeListener);
mSelectedVideoStreamTrack = aTrack->AsVideoTrack()->GetVideoStreamTrack();
VideoFrameContainer* container = GetVideoFrameContainer();
if (mSrcStreamIsPlaying && container) {
mSelectedVideoStreamTrack->AddVideoOutput(container);
}
HTMLVideoElement* self = static_cast<HTMLVideoElement*>(this);
if (self->VideoWidth() <= 1 && self->VideoHeight() <= 1) {
// MediaInfo uses dummy values of 1 for width and height to
// mark video as valid. We need a new stream size listener
// if size is 0x0 or 1x1.
mMediaStreamSizeListener = new StreamSizeListener(this);
mSelectedVideoStreamTrack->AddDirectListener(mMediaStreamSizeListener);
}
}
if (mReadyState == HAVE_NOTHING) {
// No MediaStreamTracks are captured until we have metadata.
return;
}
for (OutputMediaStream& ms : mOutputStreams) {
if (aTrack->AsVideoTrack() && ms.mCapturingAudioOnly) {
// If the output stream is for audio only we ignore video tracks.
continue;
}
AddCaptureMediaTrackToOutputStream(aTrack, ms);
}
}
}
void HTMLMediaElement::NotifyMediaTrackDisabled(MediaTrack* aTrack)
{
MOZ_ASSERT(aTrack);
if (!aTrack) {
return;
}
#ifdef DEBUG
nsString id;
aTrack->GetId(id);
LOG(LogLevel::Debug, ("MediaElement %p %sTrack with id %s disabled",
this, aTrack->AsAudioTrack() ? "Audio" : "Video",
NS_ConvertUTF16toUTF8(id).get()));
#endif
MOZ_ASSERT((!aTrack->AsAudioTrack() || !aTrack->AsAudioTrack()->Enabled()) &&
(!aTrack->AsVideoTrack() || !aTrack->AsVideoTrack()->Selected()));
if (aTrack->AsAudioTrack()) {
bool shouldMute = true;
for (uint32_t i = 0; i < AudioTracks()->Length(); ++i) {
if ((*AudioTracks())[i]->Enabled()) {
shouldMute = false;
break;
}
}
if (shouldMute) {
SetMutedInternal(mMuted | MUTED_BY_AUDIO_TRACK);
}
} else if (aTrack->AsVideoTrack()) {
if (mSrcStream) {
MOZ_ASSERT(mSelectedVideoStreamTrack);
if (mSelectedVideoStreamTrack && mMediaStreamSizeListener) {
mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
mMediaStreamSizeListener->Forget();
mMediaStreamSizeListener = nullptr;
}
VideoFrameContainer* container = GetVideoFrameContainer();
if (mSrcStreamIsPlaying && container) {
mSelectedVideoStreamTrack->RemoveVideoOutput(container);
}
mSelectedVideoStreamTrack = nullptr;
}
}
for (OutputMediaStream& ms : mOutputStreams) {
if (ms.mCapturingDecoder) {
MOZ_ASSERT(!ms.mCapturingMediaStream);
continue;
}
MOZ_ASSERT(ms.mCapturingMediaStream);
for (int32_t i = ms.mTrackPorts.Length() - 1; i >= 0; --i) {
if (ms.mTrackPorts[i].first() == aTrack->GetId()) {
// The source of this track just ended. Force-notify that it ended.
// If we bounce it to the MediaStreamGraph it might not be picked up,
// for instance if the MediaInputPort was destroyed in the same
// iteration as it was added.
MediaStreamTrack* outputTrack = ms.mStream->FindOwnedDOMTrack(
ms.mTrackPorts[i].second()->GetDestination(),
ms.mTrackPorts[i].second()->GetDestinationTrackId());
MOZ_ASSERT(outputTrack);
if (outputTrack) {
NS_DispatchToMainThread(
NewRunnableMethod(outputTrack, &MediaStreamTrack::NotifyEnded));
}
ms.mTrackPorts[i].second()->Destroy();
ms.mTrackPorts.RemoveElementAt(i);
break;
}
}
#ifdef DEBUG
for (auto pair : ms.mTrackPorts) {
MOZ_ASSERT(pair.first() != aTrack->GetId(),
"The same MediaTrack was forwarded to the output stream more than once. This shouldn't happen.");
}
#endif
}
}
@ -1272,6 +1389,8 @@ void HTMLMediaElement::NotifyMediaStreamTracksAvailable(DOMMediaStream* aStream)
LOG(LogLevel::Debug, ("MediaElement %p MediaStream tracks available", this));
mSrcStreamTracksAvailable = true;
bool videoHasChanged = IsVideo() && HasVideo() != !VideoTracks()->IsEmpty();
if (videoHasChanged) {
@ -2094,16 +2213,91 @@ NS_IMETHODIMP HTMLMediaElement::SetMuted(bool aMuted)
return NS_OK;
}
class HTMLMediaElement::CaptureStreamTrackSource :
class HTMLMediaElement::StreamCaptureTrackSource :
public MediaStreamTrackSource,
public MediaStreamTrackSource::Sink
{
public:
NS_DECL_ISUPPORTS_INHERITED
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(StreamCaptureTrackSource,
MediaStreamTrackSource)
explicit StreamCaptureTrackSource(MediaStreamTrackSource* aCapturedTrackSource)
: MediaStreamTrackSource(aCapturedTrackSource->GetPrincipal(),
true,
nsString())
, mCapturedTrackSource(aCapturedTrackSource)
{
mCapturedTrackSource->RegisterSink(this);
}
void Destroy() override
{
MOZ_ASSERT(mCapturedTrackSource);
if (mCapturedTrackSource) {
mCapturedTrackSource->UnregisterSink(this);
}
}
MediaSourceEnum GetMediaSource() const override
{
return MediaSourceEnum::Other;
}
CORSMode GetCORSMode() const override
{
return mCapturedTrackSource->GetCORSMode();
}
already_AddRefed<PledgeVoid>
ApplyConstraints(nsPIDOMWindowInner* aWindow,
const dom::MediaTrackConstraints& aConstraints) override
{
RefPtr<PledgeVoid> p = new PledgeVoid();
p->Reject(new dom::MediaStreamError(aWindow,
NS_LITERAL_STRING("OverconstrainedError"),
NS_LITERAL_STRING("")));
return p.forget();
}
void Stop() override
{
NS_ERROR("We're reporting remote=true to not be stoppable. "
"Stop() should not be called.");
}
void PrincipalChanged() override
{
mPrincipal = mCapturedTrackSource->GetPrincipal();
MediaStreamTrackSource::PrincipalChanged();
}
private:
virtual ~StreamCaptureTrackSource() {}
RefPtr<MediaStreamTrackSource> mCapturedTrackSource;
};
NS_IMPL_ADDREF_INHERITED(HTMLMediaElement::StreamCaptureTrackSource,
MediaStreamTrackSource)
NS_IMPL_RELEASE_INHERITED(HTMLMediaElement::StreamCaptureTrackSource,
MediaStreamTrackSource)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::StreamCaptureTrackSource)
NS_INTERFACE_MAP_END_INHERITING(MediaStreamTrackSource)
NS_IMPL_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::StreamCaptureTrackSource,
MediaStreamTrackSource,
mCapturedTrackSource)
class HTMLMediaElement::DecoderCaptureTrackSource :
public MediaStreamTrackSource,
public DecoderPrincipalChangeObserver
{
public:
NS_DECL_ISUPPORTS_INHERITED
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(CaptureStreamTrackSource,
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(DecoderCaptureTrackSource,
MediaStreamTrackSource)
explicit CaptureStreamTrackSource(HTMLMediaElement* aElement)
explicit DecoderCaptureTrackSource(HTMLMediaElement* aElement)
: MediaStreamTrackSource(nsCOMPtr<nsIPrincipal>(aElement->GetCurrentPrincipal()).get(),
true,
nsString())
@ -2157,20 +2351,20 @@ public:
}
protected:
virtual ~CaptureStreamTrackSource()
virtual ~DecoderCaptureTrackSource()
{
}
RefPtr<HTMLMediaElement> mElement;
};
NS_IMPL_ADDREF_INHERITED(HTMLMediaElement::CaptureStreamTrackSource,
NS_IMPL_ADDREF_INHERITED(HTMLMediaElement::DecoderCaptureTrackSource,
MediaStreamTrackSource)
NS_IMPL_RELEASE_INHERITED(HTMLMediaElement::CaptureStreamTrackSource,
NS_IMPL_RELEASE_INHERITED(HTMLMediaElement::DecoderCaptureTrackSource,
MediaStreamTrackSource)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::CaptureStreamTrackSource)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::DecoderCaptureTrackSource)
NS_INTERFACE_MAP_END_INHERITING(MediaStreamTrackSource)
NS_IMPL_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::CaptureStreamTrackSource,
NS_IMPL_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::DecoderCaptureTrackSource,
MediaStreamTrackSource,
mElement)
@ -2188,13 +2382,18 @@ public:
already_AddRefed<dom::MediaStreamTrackSource>
GetMediaStreamTrackSource(TrackID aInputTrackID) override
{
if (mElement && mElement->mSrcStream) {
NS_ERROR("Captured media element playing a stream adds tracks explicitly on main thread.");
return nullptr;
}
// We can return a new source each time here, even for different streams,
// since the sources don't keep any internal state and all of them call
// through to the same HTMLMediaElement.
// If this changes (after implementing Stop()?) we'll have to ensure we
// return the same source for all requests to the same TrackID, and only
// have one getter.
return do_AddRef(new CaptureStreamTrackSource(mElement));
return do_AddRef(new DecoderCaptureTrackSource(mElement));
}
protected:
@ -2213,8 +2412,118 @@ NS_IMPL_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::CaptureStreamTrackSourceGet
MediaStreamTrackSourceGetter,
mElement)
void
HTMLMediaElement::SetCapturedOutputStreamsEnabled(bool aEnabled) {
for (OutputMediaStream& ms : mOutputStreams) {
if (ms.mCapturingDecoder) {
MOZ_ASSERT(!ms.mCapturingMediaStream);
continue;
}
for (auto pair : ms.mTrackPorts) {
MediaStream* outputSource = ms.mStream->GetInputStream();
if (!outputSource) {
NS_ERROR("No output source stream");
return;
}
TrackID id = pair.second()->GetDestinationTrackId();
outputSource->SetTrackEnabled(id, aEnabled ? DisabledTrackMode::ENABLED
: DisabledTrackMode::SILENCE_FREEZE);
LOG(LogLevel::Debug,
("%s track %d for captured MediaStream %p",
aEnabled ? "Enabled" : "Disabled", id, ms.mStream.get()));
}
}
}
void
HTMLMediaElement::AddCaptureMediaTrackToOutputStream(MediaTrack* aTrack,
OutputMediaStream& aOutputStream,
bool aAsyncAddtrack)
{
if (aOutputStream.mCapturingDecoder) {
MOZ_ASSERT(!aOutputStream.mCapturingMediaStream);
return;
}
aOutputStream.mCapturingMediaStream = true;
MediaStream* outputSource = aOutputStream.mStream->GetInputStream();
if (!outputSource) {
NS_ERROR("No output source stream");
return;
}
ProcessedMediaStream* processedOutputSource =
outputSource->AsProcessedStream();
if (!processedOutputSource) {
NS_ERROR("Input stream not a ProcessedMediaStream");
return;
}
if (!aTrack) {
MOZ_ASSERT(false, "Bad MediaTrack");
return;
}
MediaStreamTrack* inputTrack = mSrcStream->GetTrackById(aTrack->GetId());
MOZ_ASSERT(inputTrack);
if (!inputTrack) {
NS_ERROR("Input track not found in source stream");
return;
}
#if DEBUG
for (auto pair : aOutputStream.mTrackPorts) {
MOZ_ASSERT(pair.first() != aTrack->GetId(),
"Captured track already captured to output stream");
}
#endif
TrackID destinationTrackID = aOutputStream.mNextAvailableTrackID++;
RefPtr<MediaStreamTrackSource> source =
new StreamCaptureTrackSource(&inputTrack->GetSource());
MediaSegment::Type type = inputTrack->AsAudioStreamTrack()
? MediaSegment::AUDIO
: MediaSegment::VIDEO;
RefPtr<MediaStreamTrack> track =
aOutputStream.mStream->CreateDOMTrack(destinationTrackID, type, source);
if (aAsyncAddtrack) {
NS_DispatchToMainThread(
NewRunnableMethod<StorensRefPtrPassByPtr<MediaStreamTrack>>(
aOutputStream.mStream, &DOMMediaStream::AddTrackInternal, track));
} else {
aOutputStream.mStream->AddTrackInternal(track);
}
// Track is muted initially, so we don't leak data if it's added while paused
// and an MSG iteration passes before the mute comes into effect.
processedOutputSource->SetTrackEnabled(destinationTrackID,
DisabledTrackMode::SILENCE_FREEZE);
RefPtr<MediaInputPort> port =
inputTrack->ForwardTrackContentsTo(processedOutputSource,
destinationTrackID);
Pair<nsString, RefPtr<MediaInputPort>> p(aTrack->GetId(), port);
aOutputStream.mTrackPorts.AppendElement(Move(p));
if (mSrcStreamIsPlaying) {
processedOutputSource->SetTrackEnabled(destinationTrackID,
DisabledTrackMode::ENABLED);
}
LOG(LogLevel::Debug,
("Created %s track %p with id %d from track %p through MediaInputPort %p",
inputTrack->AsAudioStreamTrack() ? "audio" : "video",
track.get(), destinationTrackID, inputTrack, port.get()));
}
already_AddRefed<DOMMediaStream>
HTMLMediaElement::CaptureStreamInternal(bool aFinishWhenEnded,
bool aCaptureAudio,
MediaStreamGraph* aGraph)
{
nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
@ -2243,26 +2552,91 @@ HTMLMediaElement::CaptureStreamInternal(bool aFinishWhenEnded,
MediaStreamTrackSourceGetter* getter = new CaptureStreamTrackSourceGetter(this);
out->mStream = DOMMediaStream::CreateTrackUnionStreamAsInput(window, aGraph, getter);
out->mFinishWhenEnded = aFinishWhenEnded;
out->mCapturingAudioOnly = aCaptureAudio;
if (aCaptureAudio) {
if (mSrcStream) {
// We don't support applying volume and mute to the captured stream, when
// capturing a MediaStream.
nsContentUtils::ReportToConsole(nsIScriptError::errorFlag,
NS_LITERAL_CSTRING("Media"),
OwnerDoc(),
nsContentUtils::eDOM_PROPERTIES,
"MediaElementAudioCaptureOfMediaStreamError");
return nullptr;
}
// mAudioCaptured tells the user that the audio played by this media element
// is being routed to the captureStreams *instead* of being played to
// speakers.
mAudioCaptured = true;
}
if (mReadyState == HAVE_NOTHING) {
// Do not expose the tracks directly before we have metadata.
RefPtr<DOMMediaStream> result = out->mStream;
return result.forget();
}
mAudioCaptured = true;
if (mDecoder) {
out->mCapturingDecoder = true;
mDecoder->AddOutputStream(out->mStream->GetInputStream()->AsProcessedStream(),
aFinishWhenEnded);
if (mReadyState >= HAVE_METADATA) {
// Expose the tracks to JS directly.
if (HasAudio()) {
TrackID audioTrackId = mMediaInfo.mAudio.mTrackId;
RefPtr<MediaStreamTrackSource> trackSource =
getter->GetMediaStreamTrackSource(audioTrackId);
if (HasAudio()) {
TrackID audioTrackId = mMediaInfo.mAudio.mTrackId;
RefPtr<MediaStreamTrackSource> trackSource =
getter->GetMediaStreamTrackSource(audioTrackId);
RefPtr<MediaStreamTrack> track =
out->mStream->CreateDOMTrack(audioTrackId, MediaSegment::AUDIO,
trackSource);
}
if (HasVideo()) {
TrackID videoTrackId = mMediaInfo.mVideo.mTrackId;
RefPtr<MediaStreamTrackSource> trackSource =
getter->GetMediaStreamTrackSource(videoTrackId);
out->mStream->AddTrackInternal(track);
LOG(LogLevel::Debug,
("Created audio track %d for captured decoder", audioTrackId));
}
if (IsVideo() && HasVideo() && !out->mCapturingAudioOnly) {
TrackID videoTrackId = mMediaInfo.mVideo.mTrackId;
RefPtr<MediaStreamTrackSource> trackSource =
getter->GetMediaStreamTrackSource(videoTrackId);
RefPtr<MediaStreamTrack> track =
out->mStream->CreateDOMTrack(videoTrackId, MediaSegment::VIDEO,
trackSource);
out->mStream->AddTrackInternal(track);
LOG(LogLevel::Debug,
("Created video track %d for captured decoder", videoTrackId));
}
}
if (mSrcStream) {
out->mCapturingMediaStream = true;
MediaStream* inputStream = out->mStream->GetInputStream();
if (!inputStream) {
NS_ERROR("No input stream");
RefPtr<DOMMediaStream> result = out->mStream;
return result.forget();
}
ProcessedMediaStream* processedInputStream =
inputStream->AsProcessedStream();
if (!processedInputStream) {
NS_ERROR("Input stream not a ProcessedMediaStream");
RefPtr<DOMMediaStream> result = out->mStream;
return result.forget();
}
for (size_t i = 0; i < AudioTracks()->Length(); ++i) {
AudioTrack* t = (*AudioTracks())[i];
if (t->Enabled()) {
AddCaptureMediaTrackToOutputStream(t, *out, false);
}
}
if (IsVideo() && !out->mCapturingAudioOnly) {
// Only add video tracks if we're a video element and the output stream
// wants video.
for (size_t i = 0; i < VideoTracks()->Length(); ++i) {
VideoTrack* t = (*VideoTracks())[i];
if (t->Selected()) {
AddCaptureMediaTrackToOutputStream(t, *out, false);
}
}
}
}
@ -2270,6 +2644,19 @@ HTMLMediaElement::CaptureStreamInternal(bool aFinishWhenEnded,
return result.forget();
}
already_AddRefed<DOMMediaStream>
HTMLMediaElement::CaptureAudio(ErrorResult& aRv,
MediaStreamGraph* aGraph)
{
RefPtr<DOMMediaStream> stream = CaptureStreamInternal(false, aGraph);
if (!stream) {
aRv.Throw(NS_ERROR_FAILURE);
return nullptr;
}
return stream.forget();
}
already_AddRefed<DOMMediaStream>
HTMLMediaElement::MozCaptureStream(ErrorResult& aRv,
MediaStreamGraph* aGraph)
@ -2462,6 +2849,7 @@ NS_IMPL_ISUPPORTS(HTMLMediaElement::ShutdownObserver, nsIObserver)
HTMLMediaElement::HTMLMediaElement(already_AddRefed<mozilla::dom::NodeInfo>& aNodeInfo)
: nsGenericHTMLElement(aNodeInfo),
mWatchManager(this, AbstractThread::MainThread()),
mSrcStreamTracksAvailable(false),
mSrcStreamPausedCurrentTime(-1),
mShutdownObserver(new ShutdownObserver),
mCurrentLoadID(0),
@ -2807,6 +3195,20 @@ HTMLMediaElement::WakeLockRelease()
}
}
HTMLMediaElement::OutputMediaStream::OutputMediaStream()
: mFinishWhenEnded(false)
, mCapturingAudioOnly(false)
, mCapturingDecoder(false)
, mCapturingMediaStream(false)
, mNextAvailableTrackID(1) {}
HTMLMediaElement::OutputMediaStream::~OutputMediaStream()
{
for (auto pair : mTrackPorts) {
pair.second()->Destroy();
}
}
bool HTMLMediaElement::ParseAttribute(int32_t aNamespaceID,
nsIAtom* aAttribute,
const nsAString& aValue,
@ -3424,10 +3826,15 @@ nsresult HTMLMediaElement::FinishDecoderSetup(MediaDecoder* aDecoder,
return rv;
}
for (uint32_t i = 0; i < mOutputStreams.Length(); ++i) {
OutputMediaStream* ms = &mOutputStreams[i];
aDecoder->AddOutputStream(ms->mStream->GetInputStream()->AsProcessedStream(),
ms->mFinishWhenEnded);
for (OutputMediaStream& ms : mOutputStreams) {
if (ms.mCapturingMediaStream) {
MOZ_ASSERT(!ms.mCapturingDecoder);
continue;
}
ms.mCapturingDecoder = true;
aDecoder->AddOutputStream(ms.mStream->GetInputStream()->AsProcessedStream(),
ms.mFinishWhenEnded);
}
#ifdef MOZ_EME
@ -3681,13 +4088,8 @@ void HTMLMediaElement::UpdateSrcMediaStreamPlaying(uint32_t aFlags)
if (mSelectedVideoStreamTrack && container) {
mSelectedVideoStreamTrack->AddVideoOutput(container);
}
VideoTrack* videoTrack = VideoTracks()->GetSelectedTrack();
if (videoTrack) {
VideoStreamTrack* videoStreamTrack = videoTrack->GetVideoStreamTrack();
if (videoStreamTrack && container) {
videoStreamTrack->AddVideoOutput(container);
}
}
SetCapturedOutputStreamsEnabled(true); // Unmute
} else {
if (stream) {
mSrcStreamPausedCurrentTime = CurrentTime();
@ -3699,13 +4101,8 @@ void HTMLMediaElement::UpdateSrcMediaStreamPlaying(uint32_t aFlags)
if (mSelectedVideoStreamTrack && container) {
mSelectedVideoStreamTrack->RemoveVideoOutput(container);
}
VideoTrack* videoTrack = VideoTracks()->GetSelectedTrack();
if (videoTrack) {
VideoStreamTrack* videoStreamTrack = videoTrack->GetVideoStreamTrack();
if (videoStreamTrack && container) {
videoStreamTrack->RemoveVideoOutput(container);
}
}
SetCapturedOutputStreamsEnabled(false); // Mute
}
// If stream is null, then DOMMediaStream::Destroy must have been
// called and that will remove all listeners/outputs.
@ -3744,7 +4141,11 @@ void HTMLMediaElement::SetupSrcMediaStreamPlayback(DOMMediaStream* aStream)
// If we pause this media element, track changes in the underlying stream
// will continue to fire events at this element and alter its track list.
// That's simpler than delaying the events, but probably confusing...
ConstructMediaTracks();
nsTArray<RefPtr<MediaStreamTrack>> tracks;
mSrcStream->GetTracks(tracks);
for (const RefPtr<MediaStreamTrack>& track : tracks) {
NotifyMediaStreamTrackAdded(track);
}
mSrcStream->OnTracksAvailable(new MediaStreamTracksAvailableCallback(this));
mMediaStreamTrackListener = new MediaStreamTrackListener(this);
@ -3767,18 +4168,29 @@ void HTMLMediaElement::EndSrcMediaStreamPlayback()
UpdateSrcMediaStreamPlaying(REMOVING_SRC_STREAM);
if (mMediaStreamSizeListener) {
mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
mSelectedVideoStreamTrack = nullptr;
MOZ_ASSERT(mSelectedVideoStreamTrack);
if (mSelectedVideoStreamTrack) {
mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
}
mMediaStreamSizeListener->Forget();
mMediaStreamSizeListener = nullptr;
}
mSelectedVideoStreamTrack = nullptr;
mMediaStreamSizeListener = nullptr;
mSrcStream->UnregisterTrackListener(mMediaStreamTrackListener);
mMediaStreamTrackListener = nullptr;
mSrcStreamTracksAvailable = false;
mSrcStream->RemovePrincipalChangeObserver(this);
mSrcStreamVideoPrincipal = nullptr;
for (OutputMediaStream& ms : mOutputStreams) {
for (auto pair : ms.mTrackPorts) {
pair.second()->Destroy();
}
ms.mTrackPorts.Clear();
}
mSrcStream = nullptr;
}
@ -3791,8 +4203,7 @@ CreateAudioTrack(AudioStreamTrack* aStreamTrack)
aStreamTrack->GetLabel(label);
return MediaTrackList::CreateAudioTrack(id, NS_LITERAL_STRING("main"),
label, EmptyString(),
aStreamTrack->Enabled());
label, EmptyString(), true);
}
static already_AddRefed<VideoTrack>
@ -3808,57 +4219,22 @@ CreateVideoTrack(VideoStreamTrack* aStreamTrack)
aStreamTrack);
}
void HTMLMediaElement::ConstructMediaTracks()
{
nsTArray<RefPtr<MediaStreamTrack>> tracks;
mSrcStream->GetTracks(tracks);
int firstEnabledVideo = -1;
for (const RefPtr<MediaStreamTrack>& track : tracks) {
if (track->Ended()) {
continue;
}
if (AudioStreamTrack* t = track->AsAudioStreamTrack()) {
RefPtr<AudioTrack> audioTrack = CreateAudioTrack(t);
AudioTracks()->AddTrack(audioTrack);
} else if (VideoStreamTrack* t = track->AsVideoStreamTrack()) {
RefPtr<VideoTrack> videoTrack = CreateVideoTrack(t);
VideoTracks()->AddTrack(videoTrack);
firstEnabledVideo = (t->Enabled() && firstEnabledVideo < 0)
? (VideoTracks()->Length() - 1)
: firstEnabledVideo;
}
}
if (VideoTracks()->Length() > 0) {
// If media resource does not indicate a particular set of video tracks to
// enable, the one that is listed first in the element's videoTracks object
// must be selected.
int index = firstEnabledVideo >= 0 ? firstEnabledVideo : 0;
(*VideoTracks())[index]->SetEnabledInternal(true, MediaTrack::FIRE_NO_EVENTS);
VideoTrack* track = (*VideoTracks())[index];
VideoStreamTrack* streamTrack = track->GetVideoStreamTrack();
mMediaStreamSizeListener = new StreamSizeListener(this);
streamTrack->AddDirectListener(mMediaStreamSizeListener);
mSelectedVideoStreamTrack = streamTrack;
if (GetVideoFrameContainer()) {
mSelectedVideoStreamTrack->AddVideoOutput(GetVideoFrameContainer());
}
}
}
void
HTMLMediaElement::NotifyMediaStreamTrackAdded(const RefPtr<MediaStreamTrack>& aTrack)
{
MOZ_ASSERT(aTrack);
if (aTrack->Ended()) {
return;
}
#ifdef DEBUG
nsString id;
aTrack->GetId(id);
LOG(LogLevel::Debug, ("%p, Adding MediaTrack with id %s",
this, NS_ConvertUTF16toUTF8(id).get()));
LOG(LogLevel::Debug, ("%p, Adding %sTrack with id %s",
this, aTrack->AsAudioStreamTrack() ? "Audio" : "Video",
NS_ConvertUTF16toUTF8(id).get()));
#endif
if (AudioStreamTrack* t = aTrack->AsAudioStreamTrack()) {
@ -3866,24 +4242,20 @@ HTMLMediaElement::NotifyMediaStreamTrackAdded(const RefPtr<MediaStreamTrack>& aT
AudioTracks()->AddTrack(audioTrack);
} else if (VideoStreamTrack* t = aTrack->AsVideoStreamTrack()) {
// TODO: Fix this per the spec on bug 1273443.
int32_t selectedIndex = VideoTracks()->SelectedIndex();
if (!IsVideo()) {
return;
}
RefPtr<VideoTrack> videoTrack = CreateVideoTrack(t);
VideoTracks()->AddTrack(videoTrack);
// New MediaStreamTrack added, set the new added video track as selected
// video track when there is no selected track.
if (selectedIndex == -1) {
if (VideoTracks()->SelectedIndex() == -1) {
MOZ_ASSERT(!mSelectedVideoStreamTrack);
videoTrack->SetEnabledInternal(true, MediaTrack::FIRE_NO_EVENTS);
mMediaStreamSizeListener = new StreamSizeListener(this);
t->AddDirectListener(mMediaStreamSizeListener);
mSelectedVideoStreamTrack = t;
VideoFrameContainer* container = GetVideoFrameContainer();
if (mSrcStreamIsPlaying && container) {
mSelectedVideoStreamTrack->AddVideoOutput(container);
}
}
}
mWatchManager.ManualNotify(&HTMLMediaElement::UpdateReadyStateInternal);
}
void
@ -3894,62 +4266,14 @@ HTMLMediaElement::NotifyMediaStreamTrackRemoved(const RefPtr<MediaStreamTrack>&
nsAutoString id;
aTrack->GetId(id);
LOG(LogLevel::Debug, ("%p, Removing MediaTrack with id %s",
this, NS_ConvertUTF16toUTF8(id).get()));
LOG(LogLevel::Debug, ("%p, Removing %sTrack with id %s",
this, aTrack->AsAudioStreamTrack() ? "Audio" : "Video",
NS_ConvertUTF16toUTF8(id).get()));
if (MediaTrack* t = AudioTracks()->GetTrackById(id)) {
AudioTracks()->RemoveTrack(t);
} else if (MediaTrack* t = VideoTracks()->GetTrackById(id)) {
VideoTracks()->RemoveTrack(t);
// TODO: Fix this per the spec on bug 1273443.
// If the removed media stream track is selected video track and there are
// still video tracks, change the selected video track to the first
// remaining track.
if (aTrack == mSelectedVideoStreamTrack) {
// The mMediaStreamSizeListener might already reset to nullptr.
if (mMediaStreamSizeListener) {
mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
}
VideoFrameContainer* container = GetVideoFrameContainer();
if (mSrcStreamIsPlaying && container) {
mSelectedVideoStreamTrack->RemoveVideoOutput(container);
}
mSelectedVideoStreamTrack = nullptr;
MOZ_ASSERT(mSrcStream);
nsTArray<RefPtr<VideoStreamTrack>> tracks;
mSrcStream->GetVideoTracks(tracks);
for (const RefPtr<VideoStreamTrack>& track : tracks) {
if (track->Ended()) {
continue;
}
if (!track->Enabled()) {
continue;
}
nsAutoString trackId;
track->GetId(trackId);
MediaTrack* videoTrack = VideoTracks()->GetTrackById(trackId);
MOZ_ASSERT(videoTrack);
videoTrack->SetEnabledInternal(true, MediaTrack::FIRE_NO_EVENTS);
if (mMediaStreamSizeListener) {
track->AddDirectListener(mMediaStreamSizeListener);
}
mSelectedVideoStreamTrack = track;
if (container) {
mSelectedVideoStreamTrack->AddVideoOutput(container);
}
return;
}
// There is no enabled video track existing, clean the
// mMediaStreamSizeListener.
if (mMediaStreamSizeListener) {
mMediaStreamSizeListener->Forget();
mMediaStreamSizeListener = nullptr;
}
}
} else {
// XXX (bug 1208328) Uncomment this when DOMMediaStream doesn't call
// NotifyTrackRemoved multiple times for the same track, i.e., when it
@ -4033,6 +4357,28 @@ void HTMLMediaElement::MetadataLoaded(const MediaInfo* aInfo,
SetCurrentTime(mDefaultPlaybackStartPosition);
mDefaultPlaybackStartPosition = 0.0;
}
if (!mSrcStream) {
return;
}
for (OutputMediaStream& ms : mOutputStreams) {
for (size_t i = 0; i < AudioTracks()->Length(); ++i) {
AudioTrack* t = (*AudioTracks())[i];
if (t->Enabled()) {
AddCaptureMediaTrackToOutputStream(t, ms);
}
}
if (IsVideo() && !ms.mCapturingAudioOnly) {
// Only add video tracks if we're a video element and the output stream
// wants video.
for (size_t i = 0; i < VideoTracks()->Length(); ++i) {
VideoTrack* t = (*VideoTracks())[i];
if (t->Selected()) {
AddCaptureMediaTrackToOutputStream(t, ms);
}
}
}
}
}
void HTMLMediaElement::FirstFrameLoaded()
@ -4137,6 +4483,8 @@ void HTMLMediaElement::PlaybackEnded()
// Discard all output streams that have finished now.
for (int32_t i = mOutputStreams.Length() - 1; i >= 0; --i) {
if (mOutputStreams[i].mFinishWhenEnded) {
LOG(LogLevel::Debug, ("Playback ended. Removing output stream %p",
mOutputStreams[i].mStream.get()));
mOutputStreams.RemoveElementAt(i);
}
}
@ -4342,9 +4690,14 @@ HTMLMediaElement::UpdateReadyStateInternal()
}
if (mSrcStream && mReadyState < nsIDOMHTMLMediaElement::HAVE_METADATA) {
if (!mSrcStreamTracksAvailable) {
LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
"MediaStreamTracks not available yet", this));
return;
}
bool hasAudioTracks = !AudioTracks()->IsEmpty();
bool hasVideoTracks = !VideoTracks()->IsEmpty();
if (!hasAudioTracks && !hasVideoTracks) {
LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
"Stream with no tracks", this));
@ -4875,6 +5228,12 @@ void HTMLMediaElement::UpdateInitialMediaSize(const nsIntSize& aSize)
if (!mMediaStreamSizeListener) {
return;
}
if (!mSelectedVideoStreamTrack) {
MOZ_ASSERT(false);
return;
}
mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
mMediaStreamSizeListener->Forget();
mMediaStreamSizeListener = nullptr;

Просмотреть файл

@ -316,7 +316,8 @@ public:
*/
bool RemoveDecoderPrincipalChangeObserver(DecoderPrincipalChangeObserver* aObserver);
class CaptureStreamTrackSource;
class StreamCaptureTrackSource;
class DecoderCaptureTrackSource;
class CaptureStreamTrackSourceGetter;
// Update the visual size of the media. Called from the decoder on the
@ -343,8 +344,18 @@ public:
*/
void NotifyLoadError();
/**
* Called by one of our associated MediaTrackLists (audio/video) when an
* AudioTrack is enabled or a VideoTrack is selected.
*/
void NotifyMediaTrackEnabled(MediaTrack* aTrack);
/**
* Called by one of our associated MediaTrackLists (audio/video) when an
* AudioTrack is disabled or a VideoTrack is unselected.
*/
void NotifyMediaTrackDisabled(MediaTrack* aTrack);
/**
* Called when tracks become available to the source media stream.
*/
@ -648,6 +659,9 @@ public:
return mAutoplayEnabled;
}
already_AddRefed<DOMMediaStream> CaptureAudio(ErrorResult& aRv,
MediaStreamGraph* aGraph = nullptr);
already_AddRefed<DOMMediaStream> MozCaptureStream(ErrorResult& aRv,
MediaStreamGraph* aGraph = nullptr);
@ -774,6 +788,23 @@ protected:
nsCOMPtr<nsITimer> mTimer;
};
// Holds references to the DOM wrappers for the MediaStreams that we're
// writing to.
struct OutputMediaStream {
OutputMediaStream();
~OutputMediaStream();
RefPtr<DOMMediaStream> mStream;
bool mFinishWhenEnded;
bool mCapturingAudioOnly;
bool mCapturingDecoder;
bool mCapturingMediaStream;
// The following members are keeping state for a captured MediaStream.
TrackID mNextAvailableTrackID;
nsTArray<Pair<nsString, RefPtr<MediaInputPort>>> mTrackPorts;
};
nsresult PlayInternal(bool aCallerIsChrome);
/** Use this method to change the mReadyState member, so required
@ -826,13 +857,6 @@ protected:
enum { REMOVING_SRC_STREAM = 0x1 };
void UpdateSrcMediaStreamPlaying(uint32_t aFlags = 0);
/**
* If loading and playing a MediaStream, for each MediaStreamTrack in the
* MediaStream, create a corresponding AudioTrack or VideoTrack during the
* phase of resource fetching.
*/
void ConstructMediaTracks();
/**
* Called by our DOMMediaStream::TrackListener when a new MediaStreamTrack has
* been added to the playback stream of |mSrcStream|.
@ -846,13 +870,36 @@ protected:
void NotifyMediaStreamTrackRemoved(const RefPtr<MediaStreamTrack>& aTrack);
/**
* Returns an nsDOMMediaStream containing the played contents of this
* Enables or disables all tracks forwarded from mSrcStream to all
* OutputMediaStreams. We do this for muting the tracks when pausing,
* and unmuting when playing the media element again.
*
* If mSrcStream is unset, this does nothing.
*/
void SetCapturedOutputStreamsEnabled(bool aEnabled);
/**
* Create a new MediaStreamTrack for aTrack and add it to the DOMMediaStream
* in aOutputStream. This automatically sets the output track to enabled or
* disabled depending on our current playing state.
*/
void AddCaptureMediaTrackToOutputStream(MediaTrack* aTrack,
OutputMediaStream& aOutputStream,
bool aAsyncAddtrack = true);
/**
* Returns an DOMMediaStream containing the played contents of this
* element. When aFinishWhenEnded is true, when this element ends playback
* we will finish the stream and not play any more into it.
* When aFinishWhenEnded is false, ending playback does not finish the stream.
* The stream will never finish.
*
* When aCaptureAudio is true, we stop playout of audio and instead route it
* to the DOMMediaStream. Volume and mute state will be applied to the audio
* reaching the stream. No video tracks will be captured in this case.
*/
already_AddRefed<DOMMediaStream> CaptureStreamInternal(bool aFinishWhenEnded,
bool aCaptureAudio,
MediaStreamGraph* aGraph = nullptr);
/**
@ -1240,6 +1287,9 @@ protected:
// At most one of mDecoder and mSrcStream can be non-null.
RefPtr<DOMMediaStream> mSrcStream;
// True once mSrcStream's initial set of tracks are known.
bool mSrcStreamTracksAvailable;
// If non-negative, the time we should return for currentTime while playing
// mSrcStream.
double mSrcStreamPausedCurrentTime;
@ -1249,10 +1299,6 @@ protected:
// Holds references to the DOM wrappers for the MediaStreams that we're
// writing to.
struct OutputMediaStream {
RefPtr<DOMMediaStream> mStream;
bool mFinishWhenEnded;
};
nsTArray<OutputMediaStream> mOutputStreams;
// Holds a reference to the MediaStreamListener attached to mSrcStream's

Просмотреть файл

@ -811,13 +811,11 @@ ContentChild::ProvideWindowCommon(TabChild* aTabOpener,
if (NS_FAILED(rv)) {
PRenderFrameChild::Send__delete__(renderFrame);
PBrowserChild::Send__delete__(newChild);
return rv;
}
}
if (!*aWindowIsNew) {
PRenderFrameChild::Send__delete__(renderFrame);
PBrowserChild::Send__delete__(newChild);
return NS_ERROR_ABORT;
}

Просмотреть файл

@ -102,6 +102,7 @@
#ifdef MOZ_ENABLE_PROFILER_SPS
#include "mozilla/ProfileGatherer.h"
#endif
#include "mozilla/ScopeExit.h"
#include "mozilla/Services.h"
#include "mozilla/StaticPtr.h"
#include "mozilla/Telemetry.h"
@ -4402,6 +4403,9 @@ ContentParent::RecvKeywordToURI(const nsCString& aKeyword,
OptionalInputStreamParams* aPostData,
OptionalURIParams* aURI)
{
*aPostData = void_t();
*aURI = void_t();
nsCOMPtr<nsIURIFixup> fixup = do_GetService(NS_URIFIXUP_CONTRACTID);
if (!fixup) {
return true;
@ -4962,6 +4966,14 @@ ContentParent::RecvCreateWindow(PBrowserParent* aThisTab,
TabParent* newTab = TabParent::GetFrom(aNewTab);
MOZ_ASSERT(newTab);
auto destroyNewTabOnError = MakeScopeExit([&] {
if (!*aWindowIsNew || NS_FAILED(*aResult)) {
if (newTab) {
newTab->Destroy();
}
}
});
// Content has requested that we open this new content window, so
// we must have an opener.
newTab->SetHasContentOpener(true);

Просмотреть файл

@ -48,13 +48,20 @@ AudioTrack::SetEnabledInternal(bool aEnabled, int aFlags)
return;
}
if (!(aFlags & MediaTrack::FIRE_NO_EVENTS)) {
mList->CreateAndDispatchChangeEvent();
if (mEnabled) {
HTMLMediaElement* element = mList->GetMediaElement();
if (element) {
element->NotifyMediaTrackEnabled(this);
}
} else {
HTMLMediaElement* element = mList->GetMediaElement();
if (element) {
element->NotifyMediaTrackDisabled(this);
}
}
if (!(aFlags & MediaTrack::FIRE_NO_EVENTS)) {
mList->CreateAndDispatchChangeEvent();
}
}

Просмотреть файл

@ -144,26 +144,36 @@ public:
MediaStreamTrack* track =
mStream->FindOwnedDOMTrack(aInputStream, aInputTrackID, aTrackID);
if (!track) {
// Track had not been created on main thread before, create it now.
NS_WARN_IF_FALSE(!mStream->mTracks.IsEmpty(),
"A new track was detected on the input stream; creating "
"a corresponding MediaStreamTrack. Initial tracks "
"should be added manually to immediately and "
"synchronously be available to JS.");
RefPtr<MediaStreamTrackSource> source;
if (mStream->mTrackSourceGetter) {
source = mStream->mTrackSourceGetter->GetMediaStreamTrackSource(aTrackID);
}
if (!source) {
NS_ASSERTION(false, "Dynamic track created without an explicit TrackSource");
nsPIDOMWindowInner* window = mStream->GetParentObject();
nsIDocument* doc = window ? window->GetExtantDoc() : nullptr;
nsIPrincipal* principal = doc ? doc->NodePrincipal() : nullptr;
source = new BasicUnstoppableTrackSource(principal);
}
track = mStream->CreateDOMTrack(aTrackID, aType, source);
if (track) {
LOG(LogLevel::Debug, ("DOMMediaStream %p Track %d from owned stream %p "
"bound to MediaStreamTrack %p.",
mStream, aTrackID, aInputStream, track));
return;
}
// Track had not been created on main thread before, create it now.
NS_WARN_IF_FALSE(!mStream->mTracks.IsEmpty(),
"A new track was detected on the input stream; creating "
"a corresponding MediaStreamTrack. Initial tracks "
"should be added manually to immediately and "
"synchronously be available to JS.");
RefPtr<MediaStreamTrackSource> source;
if (mStream->mTrackSourceGetter) {
source = mStream->mTrackSourceGetter->GetMediaStreamTrackSource(aTrackID);
}
if (!source) {
NS_ASSERTION(false, "Dynamic track created without an explicit TrackSource");
nsPIDOMWindowInner* window = mStream->GetParentObject();
nsIDocument* doc = window ? window->GetExtantDoc() : nullptr;
nsIPrincipal* principal = doc ? doc->NodePrincipal() : nullptr;
source = new BasicUnstoppableTrackSource(principal);
}
RefPtr<MediaStreamTrack> newTrack =
mStream->CreateDOMTrack(aTrackID, aType, source);
NS_DispatchToMainThread(NewRunnableMethod<RefPtr<MediaStreamTrack>>(
mStream, &DOMMediaStream::AddTrackInternal, newTrack));
}
void DoNotifyTrackEnded(MediaStream* aInputStream, TrackID aInputTrackID,
@ -193,13 +203,13 @@ public:
{
if (aTrackEvents & TrackEventCommand::TRACK_EVENT_CREATED) {
nsCOMPtr<nsIRunnable> runnable =
NewRunnableMethod<TrackID, MediaSegment::Type, MediaStream*, TrackID>(
NewRunnableMethod<TrackID, MediaSegment::Type, RefPtr<MediaStream>, TrackID>(
this, &OwnedStreamListener::DoNotifyTrackCreated,
aID, aQueuedMedia.GetType(), aInputStream, aInputTrackID);
aGraph->DispatchToMainThreadAfterStreamStateUpdate(runnable.forget());
} else if (aTrackEvents & TrackEventCommand::TRACK_EVENT_ENDED) {
nsCOMPtr<nsIRunnable> runnable =
NewRunnableMethod<MediaStream*, TrackID, TrackID>(
NewRunnableMethod<RefPtr<MediaStream>, TrackID, TrackID>(
this, &OwnedStreamListener::DoNotifyTrackEnded,
aInputStream, aInputTrackID, aID);
aGraph->DispatchToMainThreadAfterStreamStateUpdate(runnable.forget());
@ -270,7 +280,11 @@ public:
return;
}
mStream->NotifyTracksCreated();
// The owned stream listener adds its tracks after another main thread
// dispatch. We have to do the same to notify of created tracks to stay
// in sync. (Or NotifyTracksCreated is called before tracks are added).
NS_DispatchToMainThread(
NewRunnableMethod(mStream, &DOMMediaStream::NotifyTracksCreated));
}
// The methods below are called on the MediaStreamGraph thread.
@ -283,7 +297,7 @@ public:
{
if (aTrackEvents & TrackEventCommand::TRACK_EVENT_ENDED) {
nsCOMPtr<nsIRunnable> runnable =
NewRunnableMethod<StorensRefPtrPassByPtr<MediaStream>, TrackID>(
NewRunnableMethod<RefPtr<MediaStream>, TrackID>(
this, &PlaybackStreamListener::DoNotifyTrackEnded, aInputStream, aInputTrackID);
aGraph->DispatchToMainThreadAfterStreamStateUpdate(runnable.forget());
}
@ -809,7 +823,10 @@ DOMMediaStream::InitAudioCaptureStream(nsIPrincipal* aPrincipal, MediaStreamGrap
InitInputStreamCommon(audioCaptureStream, aGraph);
InitOwnedStreamCommon(aGraph);
InitPlaybackStreamCommon(aGraph);
CreateDOMTrack(AUDIO_TRACK, MediaSegment::AUDIO, audioCaptureSource);
RefPtr<MediaStreamTrack> track =
CreateDOMTrack(AUDIO_TRACK, MediaSegment::AUDIO, audioCaptureSource);
AddTrackInternal(track);
audioCaptureStream->Start();
}
@ -977,7 +994,27 @@ DOMMediaStream::RemovePrincipalChangeObserver(
return mPrincipalChangeObservers.RemoveElement(aObserver);
}
MediaStreamTrack*
void
DOMMediaStream::AddTrackInternal(MediaStreamTrack* aTrack)
{
MOZ_ASSERT(aTrack->mOwningStream == this);
MOZ_ASSERT(FindOwnedDOMTrack(aTrack->GetInputStream(),
aTrack->mInputTrackID,
aTrack->mTrackID));
MOZ_ASSERT(!FindPlaybackDOMTrack(aTrack->GetOwnedStream(),
aTrack->mTrackID));
LOG(LogLevel::Debug, ("DOMMediaStream %p Adding owned track %p", this, aTrack));
mTracks.AppendElement(
new TrackPort(mPlaybackPort, aTrack, TrackPort::InputPortOwnership::EXTERNAL));
NotifyTrackAdded(aTrack);
DispatchTrackEvent(NS_LITERAL_STRING("addtrack"), aTrack);
}
already_AddRefed<MediaStreamTrack>
DOMMediaStream::CreateDOMTrack(TrackID aTrackID, MediaSegment::Type aType,
MediaStreamTrackSource* aSource,
const MediaTrackConstraints& aConstraints)
@ -987,7 +1024,7 @@ DOMMediaStream::CreateDOMTrack(TrackID aTrackID, MediaSegment::Type aType,
MOZ_ASSERT(FindOwnedDOMTrack(GetInputStream(), aTrackID) == nullptr);
MediaStreamTrack* track;
RefPtr<MediaStreamTrack> track;
switch (aType) {
case MediaSegment::AUDIO:
track = new AudioStreamTrack(this, aTrackID, aTrackID, aSource, aConstraints);
@ -999,19 +1036,13 @@ DOMMediaStream::CreateDOMTrack(TrackID aTrackID, MediaSegment::Type aType,
MOZ_CRASH("Unhandled track type");
}
LOG(LogLevel::Debug, ("DOMMediaStream %p Created new track %p with ID %u", this, track, aTrackID));
LOG(LogLevel::Debug, ("DOMMediaStream %p Created new track %p with ID %u",
this, track.get(), aTrackID));
mOwnedTracks.AppendElement(
new TrackPort(mOwnedPort, track, TrackPort::InputPortOwnership::EXTERNAL));
mTracks.AppendElement(
new TrackPort(mPlaybackPort, track, TrackPort::InputPortOwnership::EXTERNAL));
NotifyTrackAdded(track);
DispatchTrackEvent(NS_LITERAL_STRING("addtrack"), track);
return track;
return track.forget();
}
already_AddRefed<MediaStreamTrack>
@ -1302,6 +1333,7 @@ DOMLocalMediaStream::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProt
void
DOMLocalMediaStream::Stop()
{
LOG(LogLevel::Debug, ("DOMMediaStream %p Stop()", this));
nsCOMPtr<nsPIDOMWindowInner> pWindow = GetParentObject();
nsIDocument* document = pWindow ? pWindow->GetExtantDoc() : nullptr;
nsContentUtils::ReportToConsole(nsIScriptError::warningFlag,

Просмотреть файл

@ -511,19 +511,25 @@ public:
}
/**
* Called for each track in our owned stream to indicate to JS that we
* are carrying that track.
*
* Creates a MediaStreamTrack, adds it to mTracks, raises "addtrack" and
* returns it.
* Adds a MediaStreamTrack to mTracks and raises "addtrack".
*
* Note that "addtrack" is raised synchronously and only has an effect if
* this MediaStream is already exposed to script. For spec compliance this is
* to be called from an async task.
*/
MediaStreamTrack* CreateDOMTrack(TrackID aTrackID, MediaSegment::Type aType,
MediaStreamTrackSource* aSource,
const MediaTrackConstraints& aConstraints = MediaTrackConstraints());
void AddTrackInternal(MediaStreamTrack* aTrack);
/**
* Called for each track in our owned stream to indicate to JS that we
* are carrying that track.
*
* Pre-creates a MediaStreamTrack and returns it.
* It is up to the caller to make sure it is added through AddTrackInternal.
*/
already_AddRefed<MediaStreamTrack> CreateDOMTrack(TrackID aTrackID,
MediaSegment::Type aType,
MediaStreamTrackSource* aSource,
const MediaTrackConstraints& aConstraints = MediaTrackConstraints());
/**
* Creates a MediaStreamTrack cloned from aTrack, adds it to mTracks and

Просмотреть файл

@ -574,10 +574,6 @@ public:
bool Contains(const ElemType& aInterval) const {
for (const auto& interval : mIntervals) {
if (aInterval.LeftOf(interval)) {
// Will never succeed.
return false;
}
if (interval.Contains(aInterval)) {
return true;
}

Просмотреть файл

@ -508,6 +508,8 @@ MediaDecoder::MediaDecoder(MediaDecoderOwner* aOwner)
, mMinimizePreroll(false)
, mMediaTracksConstructed(false)
, mFiredMetadataLoaded(false)
, mElementVisible(!aOwner->IsHidden())
, mForcedHidden(false)
, mIsDormant(false)
, mIsHeuristicDormantSupported(
Preferences::GetBool("media.decoder.heuristic.dormant.enabled", false))
@ -1363,7 +1365,16 @@ void
MediaDecoder::SetElementVisibility(bool aIsVisible)
{
MOZ_ASSERT(NS_IsMainThread());
mIsVisible = aIsVisible;
mElementVisible = aIsVisible;
mIsVisible = !mForcedHidden && mElementVisible;
}
void
MediaDecoder::SetForcedHidden(bool aForcedHidden)
{
MOZ_ASSERT(NS_IsMainThread());
mForcedHidden = aForcedHidden;
SetElementVisibility(mElementVisible);
}
void
@ -1425,6 +1436,12 @@ media::TimeIntervals
MediaDecoder::GetSeekable()
{
MOZ_ASSERT(NS_IsMainThread());
if (IsNaN(GetDuration())) {
// We do not have a duration yet, we can't determine the seekable range.
return TimeIntervals();
}
// We can seek in buffered range if the media is seekable. Also, we can seek
// in unbuffered ranges if the transport level is seekable (local file or the
// server supports range requests, etc.) or in cue-less WebMs

Просмотреть файл

@ -378,6 +378,10 @@ private:
// Called from HTMLMediaElement when owner document activity changes
virtual void SetElementVisibility(bool aIsVisible);
// Force override the visible state to hidden.
// Called from HTMLMediaElement when testing of video decode suspend from mochitests.
void SetForcedHidden(bool aForcedHidden);
/******
* The following methods must only be called on the main
* thread.
@ -703,6 +707,12 @@ protected:
// only be accessed from main thread.
nsAutoPtr<MediaInfo> mInfo;
// Tracks the visiblity status from HTMLMediaElement
bool mElementVisible;
// If true, forces the decoder to be considered hidden.
bool mForcedHidden;
// True if MediaDecoder is in dormant state.
bool mIsDormant;

Просмотреть файл

@ -1972,11 +1972,6 @@ MediaFormatReader::UpdateBufferedWithPromise() {
void MediaFormatReader::ReleaseResources()
{
// Before freeing a video codec, all video buffers needed to be released
// even from graphics pipeline.
if (mVideoFrameContainer) {
mVideoFrameContainer->ClearCurrentFrame();
}
mVideo.ShutdownDecoder();
mAudio.ShutdownDecoder();
}

Просмотреть файл

@ -1207,8 +1207,10 @@ public:
new LocalTrackSource(principal, audioDeviceName, mListener, source,
kAudioTrack, mPeerIdentity);
MOZ_ASSERT(IsOn(mConstraints.mAudio));
domStream->CreateDOMTrack(kAudioTrack, MediaSegment::AUDIO, audioSource,
GetInvariant(mConstraints.mAudio));
RefPtr<MediaStreamTrack> track =
domStream->CreateDOMTrack(kAudioTrack, MediaSegment::AUDIO, audioSource,
GetInvariant(mConstraints.mAudio));
domStream->AddTrackInternal(track);
}
if (mVideoDevice) {
nsString videoDeviceName;
@ -1219,8 +1221,10 @@ public:
new LocalTrackSource(principal, videoDeviceName, mListener, source,
kVideoTrack, mPeerIdentity);
MOZ_ASSERT(IsOn(mConstraints.mVideo));
domStream->CreateDOMTrack(kVideoTrack, MediaSegment::VIDEO, videoSource,
GetInvariant(mConstraints.mVideo));
RefPtr<MediaStreamTrack> track =
domStream->CreateDOMTrack(kVideoTrack, MediaSegment::VIDEO, videoSource,
GetInvariant(mConstraints.mVideo));
domStream->AddTrackInternal(track);
}
stream = domStream->GetInputStream()->AsSourceStream();
}

Просмотреть файл

@ -174,9 +174,13 @@ public:
*/
virtual void AppendNullData(StreamTime aDuration) = 0;
/**
* Replace contents with disabled data of the same duration
* Replace contents with disabled (silence/black) data of the same duration
*/
virtual void ReplaceWithDisabled() = 0;
/**
* Replace contents with null data of the same duration
*/
virtual void ReplaceWithNull() = 0;
/**
* Remove all contents, setting duration to 0.
*/
@ -313,6 +317,10 @@ public:
if (GetType() != AUDIO) {
MOZ_CRASH("Disabling unknown segment type");
}
ReplaceWithNull();
}
void ReplaceWithNull() override
{
StreamTime duration = GetDuration();
Clear();
AppendNullData(duration);

Просмотреть файл

@ -1872,7 +1872,7 @@ MediaStream::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
amount += mVideoOutputs.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += mListeners.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += mMainThreadListeners.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += mDisabledTrackIDs.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += mDisabledTracks.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += mConsumers.ShallowSizeOfExcludingThis(aMallocSizeOf);
return amount;
@ -2485,43 +2485,75 @@ MediaStream::RunAfterPendingUpdates(already_AddRefed<nsIRunnable> aRunnable)
}
void
MediaStream::SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled)
MediaStream::SetTrackEnabledImpl(TrackID aTrackID, DisabledTrackMode aMode)
{
if (aEnabled) {
mDisabledTrackIDs.RemoveElement(aTrackID);
} else {
if (!mDisabledTrackIDs.Contains(aTrackID)) {
mDisabledTrackIDs.AppendElement(aTrackID);
if (aMode == DisabledTrackMode::ENABLED) {
for (int32_t i = mDisabledTracks.Length() - 1; i >= 0; --i) {
if (aTrackID == mDisabledTracks[i].mTrackID) {
mDisabledTracks.RemoveElementAt(i);
return;
}
}
} else {
for (const DisabledTrack& t : mDisabledTracks) {
if (aTrackID == t.mTrackID) {
NS_ERROR("Changing disabled track mode for a track is not allowed");
return;
}
}
mDisabledTracks.AppendElement(Move(DisabledTrack(aTrackID, aMode)));
}
}
DisabledTrackMode
MediaStream::GetDisabledTrackMode(TrackID aTrackID)
{
for (const DisabledTrack& t : mDisabledTracks) {
if (t.mTrackID == aTrackID) {
return t.mMode;
}
}
return DisabledTrackMode::ENABLED;
}
void
MediaStream::SetTrackEnabled(TrackID aTrackID, bool aEnabled)
MediaStream::SetTrackEnabled(TrackID aTrackID, DisabledTrackMode aMode)
{
class Message : public ControlMessage {
public:
Message(MediaStream* aStream, TrackID aTrackID, bool aEnabled) :
ControlMessage(aStream), mTrackID(aTrackID), mEnabled(aEnabled) {}
Message(MediaStream* aStream, TrackID aTrackID, DisabledTrackMode aMode) :
ControlMessage(aStream),
mTrackID(aTrackID),
mMode(aMode) {}
void Run() override
{
mStream->SetTrackEnabledImpl(mTrackID, mEnabled);
mStream->SetTrackEnabledImpl(mTrackID, mMode);
}
TrackID mTrackID;
bool mEnabled;
DisabledTrackMode mMode;
};
GraphImpl()->AppendMessage(MakeUnique<Message>(this, aTrackID, aEnabled));
GraphImpl()->AppendMessage(MakeUnique<Message>(this, aTrackID, aMode));
}
void
MediaStream::ApplyTrackDisabling(TrackID aTrackID, MediaSegment* aSegment, MediaSegment* aRawSegment)
{
if (!mDisabledTrackIDs.Contains(aTrackID)) {
DisabledTrackMode mode = GetDisabledTrackMode(aTrackID);
if (mode == DisabledTrackMode::ENABLED) {
return;
}
aSegment->ReplaceWithDisabled();
if (aRawSegment) {
aRawSegment->ReplaceWithDisabled();
if (mode == DisabledTrackMode::SILENCE_BLACK) {
aSegment->ReplaceWithDisabled();
if (aRawSegment) {
aRawSegment->ReplaceWithDisabled();
}
} else if (mode == DisabledTrackMode::SILENCE_FREEZE) {
aSegment->ReplaceWithNull();
if (aRawSegment) {
aRawSegment->ReplaceWithNull();
}
} else {
MOZ_CRASH("Unsupported mode");
}
}
@ -2939,28 +2971,30 @@ SourceMediaStream::FinishWithLockHeld()
}
void
SourceMediaStream::SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled)
SourceMediaStream::SetTrackEnabledImpl(TrackID aTrackID, DisabledTrackMode aMode)
{
{
MutexAutoLock lock(mMutex);
for (TrackBound<DirectMediaStreamTrackListener>& l: mDirectTrackListeners) {
if (l.mTrackID == aTrackID) {
bool oldEnabled = !mDisabledTrackIDs.Contains(aTrackID);
if (!oldEnabled && aEnabled) {
STREAM_LOG(LogLevel::Debug, ("SourceMediaStream %p track %d setting "
"direct listener enabled",
this, aTrackID));
l.mListener->DecreaseDisabled();
} else if (oldEnabled && !aEnabled) {
STREAM_LOG(LogLevel::Debug, ("SourceMediaStream %p track %d setting "
"direct listener disabled",
this, aTrackID));
l.mListener->IncreaseDisabled();
}
if (l.mTrackID != aTrackID) {
continue;
}
DisabledTrackMode oldMode = GetDisabledTrackMode(aTrackID);
bool oldEnabled = oldMode == DisabledTrackMode::ENABLED;
if (!oldEnabled && aMode == DisabledTrackMode::ENABLED) {
STREAM_LOG(LogLevel::Debug, ("SourceMediaStream %p track %d setting "
"direct listener enabled",
this, aTrackID));
l.mListener->DecreaseDisabled(oldMode);
} else if (oldEnabled && aMode != DisabledTrackMode::ENABLED) {
STREAM_LOG(LogLevel::Debug, ("SourceMediaStream %p track %d setting "
"direct listener disabled",
this, aTrackID));
l.mListener->IncreaseDisabled(aMode);
}
}
}
MediaStream::SetTrackEnabledImpl(aTrackID, aEnabled);
MediaStream::SetTrackEnabledImpl(aTrackID, aMode);
}
void

Просмотреть файл

@ -184,6 +184,24 @@ struct TrackBound
TrackID mTrackID;
};
/**
* Describes how a track should be disabled.
*
* ENABLED Not disabled.
* SILENCE_BLACK Audio data is turned into silence, video frames are made black.
* SILENCE_FREEZE Audio data is turned into silence, video freezes at last frame.
*/
enum class DisabledTrackMode
{
  ENABLED, SILENCE_BLACK, SILENCE_FREEZE
};
// Associates a TrackID with the mode in which that track has been disabled.
// Entries are kept in MediaStream::mDisabledTracks; a track without an entry
// is treated as ENABLED (see MediaStream::GetDisabledTrackMode).
struct DisabledTrack {
  DisabledTrack(TrackID aTrackID, DisabledTrackMode aMode)
    : mTrackID(aTrackID), mMode(aMode) {}
  TrackID mTrackID;         // The track this entry applies to.
  DisabledTrackMode mMode;  // How the track is disabled.
};
/**
* A stream of synchronized audio and video data. All (not blocked) streams
* progress at the same rate --- "real time". Streams cannot seek. The only
@ -338,7 +356,7 @@ public:
// A disabled track has video replaced by black, and audio replaced by
// silence.
void SetTrackEnabled(TrackID aTrackID, bool aEnabled);
void SetTrackEnabled(TrackID aTrackID, DisabledTrackMode aMode);
// Finish event will be notified by calling methods of aListener. It is the
// responsibility of the caller to remove aListener before it is destroyed.
@ -442,7 +460,8 @@ public:
TrackID aTrackID);
virtual void RemoveDirectTrackListenerImpl(DirectMediaStreamTrackListener* aListener,
TrackID aTrackID);
virtual void SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled);
virtual void SetTrackEnabledImpl(TrackID aTrackID, DisabledTrackMode aMode);
DisabledTrackMode GetDisabledTrackMode(TrackID aTrackID);
void AddConsumer(MediaInputPort* aPort)
{
@ -598,7 +617,10 @@ protected:
nsTArray<RefPtr<MediaStreamListener> > mListeners;
nsTArray<TrackBound<MediaStreamTrackListener>> mTrackListeners;
nsTArray<MainThreadMediaStreamListener*> mMainThreadListeners;
nsTArray<TrackID> mDisabledTrackIDs;
// List of disabled TrackIDs and their associated disabled mode.
// They can either be disabled by frames being replaced by black, or by
// retaining the previous frame.
nsTArray<DisabledTrack> mDisabledTracks;
// GraphTime at which this stream starts blocking.
// This is only valid up to mStateComputedTime. The stream is considered to
@ -782,7 +804,7 @@ public:
}
// Overriding allows us to hold the mMutex lock while changing the track enable status
void SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled) override;
void SetTrackEnabledImpl(TrackID aTrackID, DisabledTrackMode aMode) override;
// Overriding allows us to ensure mMutex is locked while changing the track enable status
void

Просмотреть файл

@ -11,9 +11,15 @@
namespace mozilla {
#ifdef LOG
#undef LOG
#endif
#define LOG(type, msg) MOZ_LOG(gMediaStreamGraphLog, type, msg)
void
DirectMediaStreamTrackListener::MirrorAndDisableSegment(AudioSegment& aFrom,
AudioSegment& aTo)
AudioSegment& aTo)
{
aTo.Clear();
aTo.AppendNullData(aFrom.GetDuration());
@ -21,12 +27,20 @@ DirectMediaStreamTrackListener::MirrorAndDisableSegment(AudioSegment& aFrom,
void
DirectMediaStreamTrackListener::MirrorAndDisableSegment(VideoSegment& aFrom,
VideoSegment& aTo)
VideoSegment& aTo,
DisabledTrackMode aMode)
{
aTo.Clear();
for (VideoSegment::ChunkIterator it(aFrom); !it.IsEnded(); it.Next()) {
aTo.AppendFrame(do_AddRef(it->mFrame.GetImage()), it->GetDuration(),
it->mFrame.GetIntrinsicSize(), it->GetPrincipalHandle(), true);
if (aMode == DisabledTrackMode::SILENCE_BLACK) {
for (VideoSegment::ChunkIterator it(aFrom); !it.IsEnded(); it.Next()) {
aTo.AppendFrame(do_AddRef(it->mFrame.GetImage()),
it->GetDuration(),
it->mFrame.GetIntrinsicSize(),
it->GetPrincipalHandle(),
true);
}
} else if (aMode == DisabledTrackMode::SILENCE_FREEZE) {
aTo.AppendNullData(aFrom.GetDuration());
}
}
@ -35,11 +49,14 @@ DirectMediaStreamTrackListener::NotifyRealtimeTrackDataAndApplyTrackDisabling(Me
StreamTime aTrackOffset,
MediaSegment& aMedia)
{
if (mDisabledCount == 0) {
if (mDisabledFreezeCount == 0 && mDisabledBlackCount == 0) {
NotifyRealtimeTrackData(aGraph, aTrackOffset, aMedia);
return;
}
DisabledTrackMode mode = mDisabledBlackCount > 0
? DisabledTrackMode::SILENCE_BLACK
: DisabledTrackMode::SILENCE_FREEZE;
if (!mMedia) {
mMedia = aMedia.CreateEmptyClone();
}
@ -48,11 +65,52 @@ DirectMediaStreamTrackListener::NotifyRealtimeTrackDataAndApplyTrackDisabling(Me
static_cast<AudioSegment&>(*mMedia));
} else if (aMedia.GetType() == MediaSegment::VIDEO) {
MirrorAndDisableSegment(static_cast<VideoSegment&>(aMedia),
static_cast<VideoSegment&>(*mMedia));
static_cast<VideoSegment&>(*mMedia),
mode);
} else {
MOZ_CRASH("Unsupported media type");
}
NotifyRealtimeTrackData(aGraph, aTrackOffset, *mMedia);
}
// Increments the disabled counter matching aMode: mDisabledFreezeCount for
// SILENCE_FREEZE, mDisabledBlackCount for SILENCE_BLACK. ENABLED is not a
// valid argument here and asserts in debug builds. The counters track how
// many streams between this listener and the data source are disabled in
// each mode.
void
DirectMediaStreamTrackListener::IncreaseDisabled(DisabledTrackMode aMode)
{
  if (aMode == DisabledTrackMode::SILENCE_FREEZE) {
    ++mDisabledFreezeCount;
  } else if (aMode == DisabledTrackMode::SILENCE_BLACK) {
    ++mDisabledBlackCount;
  } else {
    MOZ_ASSERT(false, "Unknown disabled mode");
  }
  LOG(LogLevel::Debug, ("DirectMediaStreamTrackListener %p increased disabled "
                        "mode %s. Current counts are: freeze=%d, black=%d",
                        this,
                        aMode == DisabledTrackMode::SILENCE_FREEZE ? "freeze" : "black",
                        int32_t(mDisabledFreezeCount),
                        int32_t(mDisabledBlackCount)));
}
// Decrements the disabled counter matching aMode. Must be balanced with a
// prior IncreaseDisabled(aMode) for the same mode; the debug assertions
// catch a double decrease (counter going negative). ENABLED is not a valid
// argument here.
void
DirectMediaStreamTrackListener::DecreaseDisabled(DisabledTrackMode aMode)
{
  if (aMode == DisabledTrackMode::SILENCE_FREEZE) {
    --mDisabledFreezeCount;
    MOZ_ASSERT(mDisabledFreezeCount >= 0, "Double decrease");
  } else if (aMode == DisabledTrackMode::SILENCE_BLACK) {
    --mDisabledBlackCount;
    MOZ_ASSERT(mDisabledBlackCount >= 0, "Double decrease");
  } else {
    MOZ_ASSERT(false, "Unknown disabled mode");
  }
  LOG(LogLevel::Debug, ("DirectMediaStreamTrackListener %p decreased disabled "
                        "mode %s. Current counts are: freeze=%d, black=%d",
                        this,
                        aMode == DisabledTrackMode::SILENCE_FREEZE ? "freeze" : "black",
                        int32_t(mDisabledFreezeCount),
                        int32_t(mDisabledBlackCount)));
}
} // namespace mozilla

Просмотреть файл

@ -269,25 +269,21 @@ protected:
virtual ~DirectMediaStreamTrackListener() {}
void MirrorAndDisableSegment(AudioSegment& aFrom, AudioSegment& aTo);
void MirrorAndDisableSegment(VideoSegment& aFrom, VideoSegment& aTo);
void MirrorAndDisableSegment(VideoSegment& aFrom,
VideoSegment& aTo,
DisabledTrackMode aMode);
void NotifyRealtimeTrackDataAndApplyTrackDisabling(MediaStreamGraph* aGraph,
StreamTime aTrackOffset,
MediaSegment& aMedia);
void IncreaseDisabled()
{
++mDisabledCount;
}
void DecreaseDisabled()
{
--mDisabledCount;
MOZ_ASSERT(mDisabledCount >= 0, "Double decrease");
}
void IncreaseDisabled(DisabledTrackMode aMode);
void DecreaseDisabled(DisabledTrackMode aMode);
// Matches the number of disabled streams to which this listener is attached.
// The number of streams are those between the stream the listener was added
// and the SourceMediaStream that is the input of the data.
Atomic<int32_t> mDisabledCount;
Atomic<int32_t> mDisabledFreezeCount;
Atomic<int32_t> mDisabledBlackCount;
nsAutoPtr<MediaSegment> mMedia;
};

Просмотреть файл

@ -18,7 +18,7 @@
#undef LOG
#endif
static PRLogModuleInfo* gMediaStreamTrackLog;
static mozilla::LazyLogModule gMediaStreamTrackLog("MediaStreamTrack");
#define LOG(type, msg) MOZ_LOG(gMediaStreamTrackLog, type, msg)
namespace mozilla {
@ -120,10 +120,6 @@ MediaStreamTrack::MediaStreamTrack(DOMMediaStream* aStream, TrackID aTrackID,
mConstraints(aConstraints)
{
if (!gMediaStreamTrackLog) {
gMediaStreamTrackLog = PR_NewLogModule("MediaStreamTrack");
}
GetSource().RegisterSink(this);
mPrincipalHandleListener = new PrincipalHandleListener(this);
@ -216,7 +212,8 @@ MediaStreamTrack::SetEnabled(bool aEnabled)
this, aEnabled ? "Enabled" : "Disabled"));
mEnabled = aEnabled;
GetOwnedStream()->SetTrackEnabled(mTrackID, aEnabled);
GetOwnedStream()->SetTrackEnabled(mTrackID, mEnabled ? DisabledTrackMode::ENABLED
: DisabledTrackMode::SILENCE_BLACK);
}
void
@ -483,12 +480,13 @@ MediaStreamTrack::RemoveDirectListener(DirectMediaStreamTrackListener *aListener
}
already_AddRefed<MediaInputPort>
MediaStreamTrack::ForwardTrackContentsTo(ProcessedMediaStream* aStream)
MediaStreamTrack::ForwardTrackContentsTo(ProcessedMediaStream* aStream,
TrackID aDestinationTrackID)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_RELEASE_ASSERT(aStream);
RefPtr<MediaInputPort> port =
aStream->AllocateInputPort(GetOwnedStream(), mTrackID);
aStream->AllocateInputPort(GetOwnedStream(), mTrackID, aDestinationTrackID);
return port.forget();
}

Просмотреть файл

@ -388,7 +388,8 @@ public:
* Sets up a MediaInputPort from the underlying track that this
* MediaStreamTrack represents, to aStream, and returns it.
*/
already_AddRefed<MediaInputPort> ForwardTrackContentsTo(ProcessedMediaStream* aStream);
already_AddRefed<MediaInputPort> ForwardTrackContentsTo(ProcessedMediaStream* aStream,
TrackID aDestinationTrackID = TRACK_ANY);
/**
* Returns true if this track is connected to aPort and forwarded to aPort's

Просмотреть файл

@ -5,7 +5,9 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "MediaTrack.h"
#include "AudioTrack.h"
#include "MediaTrackList.h"
#include "VideoTrack.h"
namespace mozilla {
namespace dom {

Просмотреть файл

@ -72,12 +72,23 @@ MediaTrackList::AddTrack(MediaTrack* aTrack)
aTrack->Init(GetOwner());
aTrack->SetTrackList(this);
CreateAndDispatchTrackEventRunner(aTrack, NS_LITERAL_STRING("addtrack"));
if ((!aTrack->AsAudioTrack() || !aTrack->AsAudioTrack()->Enabled()) &&
(!aTrack->AsVideoTrack() || !aTrack->AsVideoTrack()->Selected())) {
// Track not enabled, no need to notify media element.
return;
}
if (HTMLMediaElement* element = GetMediaElement()) {
element->NotifyMediaTrackEnabled(aTrack);
}
}
void
MediaTrackList::RemoveTrack(const RefPtr<MediaTrack>& aTrack)
{
mTracks.RemoveElement(aTrack);
aTrack->SetEnabledInternal(false, MediaTrack::FIRE_NO_EVENTS);
aTrack->SetTrackList(nullptr);
CreateAndDispatchTrackEventRunner(aTrack, NS_LITERAL_STRING("removetrack"));
}
@ -118,6 +129,7 @@ void
MediaTrackList::EmptyTracks()
{
for (uint32_t i = 0; i < mTracks.Length(); ++i) {
mTracks[i]->SetEnabledInternal(false, MediaTrack::FIRE_NO_EVENTS);
mTracks[i]->SetTrackList(nullptr);
}
mTracks.Clear();

Просмотреть файл

@ -58,6 +58,10 @@ TrackUnionStream::TrackUnionStream() :
if (mTrackMap[i].mInputPort == aPort) {
STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p removing trackmap entry %d", this, i));
EndTrack(i);
for (auto listener : mTrackMap[i].mOwnedDirectListeners) {
// Remove listeners while the entry still exists.
RemoveDirectTrackListenerImpl(listener, mTrackMap[i].mOutputTrackID);
}
mTrackMap.RemoveElementAt(i);
}
}
@ -130,6 +134,10 @@ TrackUnionStream::TrackUnionStream() :
allFinished = false;
}
if (!mappedTracksWithMatchingInputTracks[i]) {
for (auto listener : mTrackMap[i].mOwnedDirectListeners) {
// Remove listeners while the entry still exists.
RemoveDirectTrackListenerImpl(listener, mTrackMap[i].mOutputTrackID);
}
mTrackMap.RemoveElementAt(i);
}
}
@ -229,8 +237,9 @@ TrackUnionStream::TrackUnionStream() :
}
MediaStream* source = map->mInputPort->GetSource();
map->mOwnedDirectListeners.AppendElement(bound.mListener);
if (mDisabledTrackIDs.Contains(bound.mTrackID)) {
bound.mListener->IncreaseDisabled();
DisabledTrackMode currentMode = GetDisabledTrackMode(bound.mTrackID);
if (currentMode != DisabledTrackMode::ENABLED) {
bound.mListener->IncreaseDisabled(currentMode);
}
STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p adding direct listener "
"%p for track %d. Forwarding to input "
@ -345,28 +354,30 @@ TrackUnionStream::TrackUnionStream() :
}
void
TrackUnionStream::SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled) {
TrackUnionStream::SetTrackEnabledImpl(TrackID aTrackID, DisabledTrackMode aMode) {
bool enabled = aMode == DisabledTrackMode::ENABLED;
for (TrackMapEntry& entry : mTrackMap) {
if (entry.mOutputTrackID == aTrackID) {
STREAM_LOG(LogLevel::Info, ("TrackUnionStream %p track %d was explicitly %s",
this, aTrackID, aEnabled ? "enabled" : "disabled"));
this, aTrackID, enabled ? "enabled" : "disabled"));
for (DirectMediaStreamTrackListener* listener : entry.mOwnedDirectListeners) {
bool oldEnabled = !mDisabledTrackIDs.Contains(aTrackID);
if (!oldEnabled && aEnabled) {
DisabledTrackMode oldMode = GetDisabledTrackMode(aTrackID);
bool oldEnabled = oldMode == DisabledTrackMode::ENABLED;
if (!oldEnabled && enabled) {
STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p track %d setting "
"direct listener enabled",
this, aTrackID));
listener->DecreaseDisabled();
} else if (oldEnabled && !aEnabled) {
listener->DecreaseDisabled(oldMode);
} else if (oldEnabled && !enabled) {
STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p track %d setting "
"direct listener disabled",
this, aTrackID));
listener->IncreaseDisabled();
listener->IncreaseDisabled(aMode);
}
}
}
}
MediaStream::SetTrackEnabledImpl(aTrackID, aEnabled);
MediaStream::SetTrackEnabledImpl(aTrackID, aMode);
}
MediaStream*
@ -408,8 +419,9 @@ TrackUnionStream::AddDirectTrackListenerImpl(already_AddRefed<DirectMediaStreamT
this, listener.get(), aTrackID, source,
entry.mInputTrackID));
entry.mOwnedDirectListeners.AppendElement(listener);
if (mDisabledTrackIDs.Contains(aTrackID)) {
listener->IncreaseDisabled();
DisabledTrackMode currentMode = GetDisabledTrackMode(aTrackID);
if (currentMode != DisabledTrackMode::ENABLED) {
listener->IncreaseDisabled(currentMode);
}
source->AddDirectTrackListenerImpl(listener.forget(),
entry.mInputTrackID);
@ -440,9 +452,10 @@ TrackUnionStream::RemoveDirectTrackListenerImpl(DirectMediaStreamTrackListener*
this, aListener, aTrackID,
entry.mInputPort->GetSource(),
entry.mInputTrackID));
if (mDisabledTrackIDs.Contains(aTrackID)) {
DisabledTrackMode currentMode = GetDisabledTrackMode(aTrackID);
if (currentMode != DisabledTrackMode::ENABLED) {
// Reset the listener's state.
aListener->DecreaseDisabled();
aListener->DecreaseDisabled(currentMode);
}
entry.mOwnedDirectListeners.RemoveElementAt(i);
break;

Просмотреть файл

@ -25,7 +25,7 @@ public:
void RemoveInput(MediaInputPort* aPort) override;
void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) override;
void SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled) override;
void SetTrackEnabledImpl(TrackID aTrackID, DisabledTrackMode aMode) override;
MediaStream* GetInputStreamFor(TrackID aTrackID) override;
TrackID GetInputTrackIDFor(TrackID aTrackID) override;

Просмотреть файл

@ -78,19 +78,24 @@ VideoTrack::SetEnabledInternal(bool aEnabled, int aFlags)
// Set the index of selected video track to the current's index.
list.mSelectedIndex = curIndex;
HTMLMediaElement* element = mList->GetMediaElement();
if (element) {
element->NotifyMediaTrackEnabled(this);
}
} else {
list.mSelectedIndex = -1;
HTMLMediaElement* element = mList->GetMediaElement();
if (element) {
element->NotifyMediaTrackDisabled(this);
}
}
// Fire the change event at selection changes on this video track, shall
// propose a spec change later.
if (!(aFlags & MediaTrack::FIRE_NO_EVENTS)) {
list.CreateAndDispatchChangeEvent();
HTMLMediaElement* element = mList->GetMediaElement();
if (element) {
element->NotifyMediaTrackEnabled(this);
}
}
}

Просмотреть файл

@ -29,9 +29,8 @@ MediaSourceDemuxer::MediaSourceDemuxer()
}
// Due to inaccuracies in determining buffer end
// frames (Bug 1065207). This value is based on the end of frame
// default value used in Blink, kDefaultBufferDurationInMs.
const TimeUnit MediaSourceDemuxer::EOS_FUZZ = media::TimeUnit::FromMicroseconds(125000);
// frames (Bug 1065207). This value is based on videos seen in the wild.
const TimeUnit MediaSourceDemuxer::EOS_FUZZ = media::TimeUnit::FromMicroseconds(500000);
RefPtr<MediaSourceDemuxer::InitPromise>
MediaSourceDemuxer::Init()
@ -381,7 +380,9 @@ RefPtr<MediaSourceTrackDemuxer::SeekPromise>
MediaSourceTrackDemuxer::DoSeek(media::TimeUnit aTime)
{
TimeIntervals buffered = mManager->Buffered(mType);
buffered.SetFuzz(MediaSourceDemuxer::EOS_FUZZ);
// Fuzz factor represents a +/- threshold. So when seeking it allows the gap
// to be twice as big as the fuzz value. We only want to allow EOS_FUZZ gap.
buffered.SetFuzz(MediaSourceDemuxer::EOS_FUZZ / 2);
TimeUnit seekTime = std::max(aTime - mPreRoll, TimeUnit::FromMicroseconds(0));
if (!buffered.Contains(seekTime)) {
@ -400,7 +401,7 @@ MediaSourceTrackDemuxer::DoSeek(media::TimeUnit aTime)
MOZ_ASSERT(index != TimeIntervals::NoIndex);
seekTime = buffered[index].mStart;
}
seekTime = mManager->Seek(mType, seekTime, MediaSourceDemuxer::EOS_FUZZ);
seekTime = mManager->Seek(mType, seekTime, MediaSourceDemuxer::EOS_FUZZ / 2);
bool error;
RefPtr<MediaRawData> sample =
mManager->GetSample(mType,

Просмотреть файл

@ -115,6 +115,8 @@ skip-if = ((os == "win" && os_version == "5.1") || (toolkit == 'android')) # Not
[test_SplitAppend.html]
[test_SplitAppend_mp4.html]
skip-if = ((os == "win" && os_version == "5.1") || (toolkit == 'android')) # Not supported on xp and android 2.3
[test_Threshold_mp4.html]
skip-if = ((os == "win" && os_version == "5.1") || (toolkit == 'android')) # Not supported on xp and android 2.3
[test_TimestampOffset_mp4.html]
skip-if = ((os == "win" && os_version == "5.1") || (toolkit == 'android')) # Not supported on xp and android 2.3
[test_TruncatedDuration.html]

Просмотреть файл

@ -22,7 +22,7 @@ runWithMSE(function(ms, el) {
el.addEventListener("playing", function() {
ok(el.buffered.length > 0, "data is buffered");
is(el.buffered.start(0), 0, "must fire playing when data has been loaded");
is(el.currentTime, 0, "must fire playing at start");
ok(el.currentTime >= 0, "must have started playback");
});
once(ms, 'sourceopen').then(function() {
ok(true, "Receive a sourceopen event");

Просмотреть файл

@ -0,0 +1,81 @@
<!DOCTYPE HTML>
<html>
<head>
<title>MSE: data gap detection</title>
<script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<script type="text/javascript" src="mediasource.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
</head>
<body>
<pre id="test">
<script class="testbody" type="text/javascript">
SimpleTest.waitForExplicitFinish();
runWithMSE(function(ms, el) {
var threshold = 0.5; // gap threshold in seconds.
// Duration of a frame. The FFmpeg decoder can't properly calculate the duration of the last frame.
var fuzz = 33322 / 1000000;
once(ms, 'sourceopen').then(function() {
ok(true, "Receive a sourceopen event");
var videosb = ms.addSourceBuffer("video/mp4");
var vchunks = [ {start: 0, end: 3.2033}, { start: 3.2033, end: 6.4066}];
fetchAndLoad(videosb, 'bipbop/bipbop_video', ['init'], '.mp4')
.then(fetchAndLoad.bind(null, videosb, 'bipbop/bipbop_video', range(1, 5), '.m4s'))
.then(function() {
// We will insert a gap of threshold
videosb.timestampOffset = threshold;
return fetchAndLoad(videosb, 'bipbop/bipbop_video', range(5, 9), '.m4s');
}).then(function() {
// HTMLMediaElement fires 'waiting' if somebody invokes |play()| before the MDSM
// has notified it of available data. Make sure that we get 'playing' before
// we starting waiting for 'waiting'.
info("Invoking play()");
var p = once(el, 'playing');
el.play();
return p;
}).then(function() {
return once(el, 'waiting');
}).then(function() {
// We're waiting for data at the end of the last segment.
isfuzzy(el.currentTime, vchunks[1].end + threshold, fuzz, "skipped the gap properly");
is(el.buffered.length, 2, "buffered range has right length");
// Now we test that seeking will succeed despite the gap.
el.currentTime = el.buffered.end(0) + (threshold / 2);
return once(el, 'seeked');
}).then(function() {
// Now we test that we don't pass the gap.
// Clean up our sourcebuffer by removing all data.
videosb.timestampOffset = 0;
videosb.remove(0, Infinity);
el.currentTime = 0;
el.pause();
return once(videosb, "updateend");
}).then(function() {
return fetchAndLoad(videosb, 'bipbop/bipbop_video', range(1, 5), '.m4s');
}).then(function() {
// We will insert a gap of threshold + 1ms
videosb.timestampOffset = threshold + 1/1000;
return fetchAndLoad(videosb, 'bipbop/bipbop_video', range(5, 9), '.m4s');
}).then(function() {
info("Invoking play()");
var p = once(el, 'playing');
el.play();
return p;
}).then(function() {
return once(el, 'waiting');
}).then(function() {
// We're waiting for data at the end of the first segment as the gap is too big.
isfuzzy(el.currentTime, vchunks[0].end, fuzz, "stopped at the gap properly");
SimpleTest.finish();
});
});
});
</script>
</pre>
</body>
</html>

Двоичные данные
dom/media/test/gizmo-noaudio.webm Normal file

Двоичный файл не отображается.

Просмотреть файл

@ -0,0 +1 @@
Cache-Control: no-store

Двоичные данные
dom/media/test/gizmo.webm Normal file

Двоичный файл не отображается.

Просмотреть файл

@ -0,0 +1 @@
Cache-Control: no-store

Просмотреть файл

@ -1430,7 +1430,9 @@ var gEMENonMSEFailTests = [
// background tabs tests.
var gDecodeSuspendTests = [
{ name:"gizmo.mp4", type:"video/mp4", duration:5.56 },
{ name:"gizmo-noaudio.mp4", type:"video/mp4", duration:5.56 }
{ name:"gizmo-noaudio.mp4", type:"video/mp4", duration:5.56 },
{ name:"gizmo.webm", type:'video/webm; codecs="vp9,opus"', duration:5.56 },
{ name:"gizmo-noaudio.webm", type:'video/webm; codecs="vp9"', duration:5.56 }
];
function checkMetadata(msg, e, test) {

Просмотреть файл

@ -423,6 +423,10 @@ support-files =
gizmo.mp4^headers^
gizmo-noaudio.mp4
gizmo-noaudio.mp4^headers^
gizmo.webm
gizmo.webm^headers^
gizmo-noaudio.webm
gizmo-noaudio.webm^headers^
huge-id3.mp3
huge-id3.mp3^headers^
id3tags.mp3

Просмотреть файл

@ -33,13 +33,19 @@ try {
function AudioStreamAnalyser(ac, stream) {
this.audioContext = ac;
this.stream = stream;
this.sourceNodes = this.stream.getAudioTracks().map(
t => this.audioContext.createMediaStreamSource(new MediaStream([t])));
this.sourceNodes = [];
this.analyser = this.audioContext.createAnalyser();
// Setting values lower than default for speedier testing on emulators
this.analyser.smoothingTimeConstant = 0.2;
this.analyser.fftSize = 1024;
this.sourceNodes.forEach(n => n.connect(this.analyser));
this.connectTrack = t => {
let source = this.audioContext.createMediaStreamSource(new MediaStream([t]));
this.sourceNodes.push(source);
source.connect(this.analyser);
};
this.stream.getAudioTracks().forEach(t => this.connectTrack(t));
this.onaddtrack = ev => this.connectTrack(ev.track);
this.stream.addEventListener("addtrack", this.onaddtrack);
this.data = new Uint8Array(this.analyser.frequencyBinCount);
}
@ -75,7 +81,7 @@ AudioStreamAnalyser.prototype = {
c.clearRect(0, 0, cvs.width, cvs.height);
var array = self.getByteFrequencyData();
for (var i = 0; i < array.length; i++) {
c.fillRect(i, (cvs.height - (array[i])), 1, cvs.height);
c.fillRect(i, (cvs.height - (array[i] / 2)), 1, cvs.height);
}
if (!cvs.stopDrawing) {
requestAnimationFrame(render);
@ -106,6 +112,7 @@ AudioStreamAnalyser.prototype = {
this.disableDebugCanvas();
this.sourceNodes.forEach(n => n.disconnect());
this.sourceNodes = [];
this.stream.removeEventListener("addtrack", this.onaddtrack);
},
/**
@ -591,6 +598,33 @@ function createOneShotEventWrapper(wrapper, obj, event) {
};
}
/**
* Returns a promise that resolves when `target` has raised an event with the
* given name the given number of times. Cancel the returned promise by passing
* in a `cancelPromise` and resolve it.
*
* @param {object} target
* The target on which the event should occur.
* @param {string} name
* The name of the event that should occur.
* @param {integer} count
* Optional number of times the event should be raised before resolving.
* @param {promise} cancelPromise
* Optional promise that on resolving rejects the returned promise,
* so we can avoid logging results after a test has finished.
* @returns {promise} A promise that resolves to the last of the seen events.
*/
function haveEvents(target, name, count, cancelPromise) {
  var listener;
  // Default to resolving after a single event when no count is given.
  var counter = count || 1;
  return Promise.race([
    // A resolved cancelPromise rejects the race so callers stop waiting.
    // Without one, this arm never settles and only the event arm matters.
    (cancelPromise || new Promise(() => {})).then(e => Promise.reject(e)),
    new Promise(resolve =>
      target.addEventListener(name, listener = e => (--counter < 1 && resolve(e))))
  ])
  // Clean up the listener on success; resolve to the last seen event.
  .then(e => (target.removeEventListener(name, listener), e));
};
/**
* Returns a promise that resolves when `target` has raised an event with the
* given name. Cancel the returned promise by passing in a `cancelPromise` and
@ -601,16 +635,52 @@ function createOneShotEventWrapper(wrapper, obj, event) {
* @param {string} name
* The name of the event that should occur.
* @param {promise} cancelPromise
* A promise that on resolving rejects the returned promise,
* Optional promise that on resolving rejects the returned promise,
* so we can avoid logging results after a test has finished.
* @returns {promise} A promise that resolves to the seen event.
*/
function haveEvent(target, name, cancelPromise) {
var listener;
var p = Promise.race([
(cancelPromise || new Promise()).then(e => Promise.reject(e)),
new Promise(resolve => target.addEventListener(name, listener = resolve))
]);
return p.then(event => (target.removeEventListener(name, listener), event));
return haveEvents(target, name, 1, cancelPromise);
};
/**
* Returns a promise that resolves if the target has not seen the given event
* after one crank (or until the given timeoutPromise resolves) of the event
* loop.
*
* @param {object} target
* The target on which the event should not occur.
* @param {string} name
* The name of the event that should not occur.
* @param {promise} timeoutPromise
* Optional promise defining how long we should wait before resolving.
* @returns {promise} A promise that is rejected if we see the given event, or
* resolves after a timeout otherwise.
*/
function haveNoEvent(target, name, timeoutPromise) {
  // Invert haveEvent: seeing the event is a failure ("Too many ... events"),
  // while hitting the timeout (rejection of the inner race) means success.
  // Default timeout is one crank of the event loop via wait(0).
  return haveEvent(target, name, timeoutPromise || wait(0))
    .then(() => Promise.reject(new Error("Too many " + name + " events")),
          () => {});
};
/**
* Returns a promise that resolves after the target has seen the given number
* of events but no such event in a following crank of the event loop.
*
* @param {object} target
* The target on which the events should occur.
* @param {string} name
* The name of the event that should occur.
* @param {integer} count
* Optional number of times the event should be raised before resolving.
* @param {promise} cancelPromise
* Optional promise that on resolving rejects the returned promise,
* so we can avoid logging results after a test has finished.
* @returns {promise} A promise that resolves to the last of the seen events.
*/
function haveEventsButNoMore(target, name, count, cancelPromise) {
  // First wait for `count` events, then verify no extra event arrives in the
  // following crank of the event loop; resolve to the last seen event.
  return haveEvents(target, name, count, cancelPromise)
    .then(e => haveNoEvent(target, name).then(() => e));
};
/**

Просмотреть файл

@ -68,6 +68,12 @@ skip-if = toolkit == 'gonk' || buildapp == 'mulet' # Bug 1063290, intermittent t
[test_getUserMedia_getTrackById.html]
[test_getUserMedia_gumWithinGum.html]
[test_getUserMedia_loadedmetadata.html]
[test_getUserMedia_mediaElementCapture_audio.html]
skip-if = toolkit == 'gonk' || buildapp == 'mulet' || android_version == '18' # b2g emulator seems to be too slow (Bug 1016498 and 1008080), android(Bug 1189784, timeouts on 4.3 emulator)
[test_getUserMedia_mediaElementCapture_tracks.html]
skip-if = toolkit == 'gonk' || buildapp == 'mulet' || android_version == '18' # b2g emulator seems to be too slow (Bug 1016498 and 1008080), android(Bug 1189784, timeouts on 4.3 emulator)
[test_getUserMedia_mediaElementCapture_video.html]
skip-if = toolkit == 'gonk' || buildapp == 'mulet' || android_version == '18' # b2g emulator seems to be too slow (Bug 1016498 and 1008080), android(Bug 1189784, timeouts on 4.3 emulator)
[test_getUserMedia_mediaStreamClone.html]
[test_getUserMedia_mediaStreamConstructors.html]
[test_getUserMedia_mediaStreamTrackClone.html]

Просмотреть файл

@ -0,0 +1,115 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="mediaStreamPlayback.js"></script>
<script type="application/javascript" src="head.js"></script>
</head>
<body>
<pre id="test">
<script>
createHTML({
bug: "1259788",
title: "Test CaptureStream audio content on HTMLMediaElement playing a gUM MediaStream",
visible: true
});
// Shared state across the promise chain: the Web Audio context used for
// analysis, the <audio> element playing the gUM stream, and the analyser
// attached to the element's captured output stream.
var audioContext;
var gUMAudioElement;
var analyser;
// Play a gUM audio stream through an <audio> element, capture the element's
// output with mozCaptureStream(), and verify via frequency analysis that
// audio flows (energy at TEST_AUDIO_FREQ, none at 50Hz/2500Hz) exactly when
// the element is playing, and stops when paused or the source is removed.
runTest(() => getUserMedia({audio: true})
.then(stream => {
gUMAudioElement = createMediaElement("gUMAudio", "local", "gUMAudio", true);
gUMAudioElement.srcObject = stream;
audioContext = new AudioContext();
info("Capturing");
analyser = new AudioStreamAnalyser(audioContext,
gUMAudioElement.mozCaptureStream());
analyser.enableDebugCanvas();
// Expect a tone at TEST_AUDIO_FREQ and silence in neighboring bins.
return analyser.waitForAnalysisSuccess(array =>
array[analyser.binIndexForFrequency(50)] < 50 &&
array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ)] > 200 &&
array[analyser.binIndexForFrequency(2500)] < 50);
})
.then(() => {
info("Audio flowing. Pausing.");
gUMAudioElement.pause();
// While paused, all bins — including TEST_AUDIO_FREQ — must be quiet.
return analyser.waitForAnalysisSuccess(array =>
array[analyser.binIndexForFrequency(50)] < 50 &&
array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ)] < 50 &&
array[analyser.binIndexForFrequency(2500)] < 50);
})
.then(() => {
info("Audio stopped flowing. Playing.");
gUMAudioElement.play();
// Resuming playback must restore the tone in the captured stream.
return analyser.waitForAnalysisSuccess(array =>
array[analyser.binIndexForFrequency(50)] < 50 &&
array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ)] > 200 &&
array[analyser.binIndexForFrequency(2500)] < 50);
})
.then(() => {
info("Audio flowing. Removing source.");
// Detach the source; keep a reference so it can be re-attached below.
var stream = gUMAudioElement.srcObject;
gUMAudioElement.srcObject = null;
return analyser.waitForAnalysisSuccess(array =>
array[analyser.binIndexForFrequency(50)] < 50 &&
array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ)] < 50 &&
array[analyser.binIndexForFrequency(2500)] < 50)
.then(() => stream);
})
.then(stream => {
info("Audio stopped flowing. Setting source.");
// Re-attaching the same stream must make audio flow again.
gUMAudioElement.srcObject = stream;
return analyser.waitForAnalysisSuccess(array =>
array[analyser.binIndexForFrequency(50)] < 50 &&
array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ)] > 200 &&
array[analyser.binIndexForFrequency(2500)] < 50);
})
.then(() => {
info("Audio flowing from new source. Adding a track.");
// Add a second, distinguishable tone (2000Hz oscillator) as a new track;
// both tones should then be present in the captured output.
let oscillator = audioContext.createOscillator();
oscillator.type = 'sine';
oscillator.frequency.value = 2000;
oscillator.start();
let oscOut = audioContext.createMediaStreamDestination();
oscillator.connect(oscOut);
gUMAudioElement.srcObject.addTrack(oscOut.stream.getTracks()[0]);
return analyser.waitForAnalysisSuccess(array =>
array[analyser.binIndexForFrequency(50)] < 50 &&
array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ)] > 200 &&
array[analyser.binIndexForFrequency(1500)] < 50 &&
array[analyser.binIndexForFrequency(2000)] > 200 &&
array[analyser.binIndexForFrequency(2500)] < 50);
})
.then(() => {
info("Audio flowing from new track. Removing a track.");
// Remove the original gUM track; only the 2000Hz tone should remain.
const gUMTrack = gUMAudioElement.srcObject.getTracks()[0];
gUMAudioElement.srcObject.removeTrack(gUMTrack);
is(gUMAudioElement.srcObject.getTracks().length, 1,
"A track should have been removed");
return analyser.waitForAnalysisSuccess(array =>
array[analyser.binIndexForFrequency(50)] < 50 &&
array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ)] < 50 &&
array[analyser.binIndexForFrequency(1500)] < 50 &&
array[analyser.binIndexForFrequency(2000)] > 200 &&
array[analyser.binIndexForFrequency(2500)] < 50);
})
.then(() => ok(true, "Test passed."))
.catch(e => ok(false, "Test failed: " + e + (e.stack ? "\n" + e.stack : ""))));
</script>
</pre>
</body>
</html>

Просмотреть файл

@ -0,0 +1,185 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="mediaStreamPlayback.js"></script>
<script type="application/javascript" src="head.js"></script>
</head>
<body>
<pre id="test">
<script>
createHTML({
bug: "1259788",
title: "Test CaptureStream track output on HTMLMediaElement playing a gUM MediaStream",
visible: true
});
// Elements under test, their captured streams, and all tracks created along
// the way (collected in `tracks` so they can be stopped during cleanup).
var audioElement;
var audioCaptureStream;
var videoElement;
var videoCaptureStream;
var untilEndedElement;
var streamUntilEnded;
var tracks = [];
// Verify the set of tracks exposed by mozCaptureStream()/
// mozCaptureStreamUntilEnded(): audio elements capture only audio tracks,
// video elements capture all audio tracks but at most one video track, and
// dynamic addTrack/removeTrack/source-reset on the source stream is
// reflected (via "addtrack" events and track "ended" state) in the capture.
runTest(() => getUserMedia({audio: true, video: true})
.then(stream => {
// We need to test with multiple tracks. We add an extra of each kind.
stream.getTracks().forEach(t => stream.addTrack(t.clone()));
audioElement = createMediaElement("gUMAudio", "local", "gUMAudio", true);
audioElement.srcObject = stream;
return haveEvent(audioElement, "loadedmetadata", wait(5000, new Error("Timeout")));
})
.then(() => {
info("Capturing audio element (loadedmetadata -> captureStream)");
audioCaptureStream = audioElement.mozCaptureStream();
is(audioCaptureStream.getAudioTracks().length, 2,
"audio element should capture two audio tracks");
is(audioCaptureStream.getVideoTracks().length, 0,
"audio element should not capture any video tracks");
// Tracks known at capture time must not also fire "addtrack".
return haveNoEvent(audioCaptureStream, "addtrack");
})
.then(() => {
videoElement = createMediaElement("gUMVideo", "local", "gUMVideo", false);
info("Capturing video element (captureStream -> loadedmetadata)");
// Capture before metadata is known: tracks must arrive via "addtrack".
videoCaptureStream = videoElement.mozCaptureStream();
videoElement.srcObject = audioElement.srcObject.clone();
is(videoCaptureStream.getTracks().length, 0,
"video element should have no tracks before metadata known");
// Expect exactly 3 tracks (2 audio + 1 video; extra video is dropped).
return haveEventsButNoMore(
videoCaptureStream, "addtrack", 3, wait(5000, new Error("No event")));
})
.then(() => {
is(videoCaptureStream.getAudioTracks().length, 2,
"video element should capture two audio tracks");
is(videoCaptureStream.getVideoTracks().length, 1,
"video element should capture one video track at most");
info("Testing dynamically adding audio track to audio element");
audioElement.srcObject.addTrack(
audioElement.srcObject.getAudioTracks()[0].clone());
return haveEventsButNoMore(
audioCaptureStream, "addtrack", 1, wait(5000, new Error("No event")));
})
.then(() => {
is(audioCaptureStream.getAudioTracks().length, 3,
"Audio element should have three audio tracks captured.");
info("Testing dynamically adding video track to audio element");
// A video track added to an audio element must NOT reach the capture.
audioElement.srcObject.addTrack(
audioElement.srcObject.getVideoTracks()[0].clone());
return haveNoEvent(audioCaptureStream, "addtrack");
})
.then(() => {
is(audioCaptureStream.getVideoTracks().length, 0,
"Audio element should have no video tracks captured.");
info("Testing dynamically adding audio track to video element");
videoElement.srcObject.addTrack(
videoElement.srcObject.getAudioTracks()[0].clone());
return haveEventsButNoMore(
videoCaptureStream, "addtrack", 1, wait(5000, new Error("Timeout")));
})
.then(() => {
is(videoCaptureStream.getAudioTracks().length, 3,
"Captured video stream should have three audio tracks captured.");
info("Testing dynamically adding video track to video element");
// A second video track must not produce another captured video track.
videoElement.srcObject.addTrack(
videoElement.srcObject.getVideoTracks()[0].clone());
return haveNoEvent(videoCaptureStream, "addtrack");
})
.then(() => {
is(videoCaptureStream.getVideoTracks().length, 1,
"Captured video stream should have at most one video tracks captured.");
info("Testing track removal.");
// Remember the source tracks for cleanup before removing them.
tracks.push(...videoElement.srcObject.getTracks());
videoElement.srcObject.getVideoTracks().reverse().forEach(t =>
videoElement.srcObject.removeTrack(t));
is(videoCaptureStream.getVideoTracks()
.filter(t => t.readyState == "live").length, 1,
"Captured video should have still have one video track captured.");
// The captured track ends asynchronously after removal.
return haveEvent(videoCaptureStream.getVideoTracks()[0], "ended",
wait(5000, new Error("Timeout")));
})
.then(() => {
is(videoCaptureStream.getVideoTracks()
.filter(t => t.readyState == "live").length, 0,
"Captured video stream should have no video tracks captured after removal.");
info("Testing source reset.");
})
.then(() => getUserMedia({audio: true, video: true}))
.then(stream => {
// Replacing srcObject: all currently-live captured tracks must end ...
videoElement.srcObject = stream;
return Promise.all(videoCaptureStream.getTracks()
.filter(t => t.readyState == "live")
.map(t => haveEvent(t, "ended", wait(5000, new Error("Timeout")))));
})
// ... and the new source's tracks (1 audio + 1 video) must be added.
.then(() => haveEventsButNoMore(
videoCaptureStream, "addtrack", 2, wait(5000, new Error("Timeout"))))
.then(() => {
is(videoCaptureStream.getAudioTracks()
.filter(t => t.readyState == "ended").length, 3,
"Captured video stream should have three ended audio tracks");
is(videoCaptureStream.getAudioTracks()
.filter(t => t.readyState == "live").length, 1,
"Captured video stream should have one live audio track");
is(videoCaptureStream.getVideoTracks()
.filter(t => t.readyState == "ended").length, 1,
"Captured video stream should have one ended video tracks");
is(videoCaptureStream.getVideoTracks()
.filter(t => t.readyState == "live").length, 1,
"Captured video stream should have one live video track");
info("Testing CaptureStreamUntilEnded");
untilEndedElement =
createMediaElement("gUMVideoUntilEnded", "local", "gUMVideoUntilEnded", false);
untilEndedElement.srcObject = audioElement.srcObject;
return haveEvent(untilEndedElement, "loadedmetadata",
wait(5000, new Error("Timeout")));
})
.then(() => {
streamUntilEnded = untilEndedElement.mozCaptureStreamUntilEnded();
is(streamUntilEnded.getAudioTracks().length, 3,
"video element should capture all 3 audio tracks until ended");
is(streamUntilEnded.getVideoTracks().length, 1,
"video element should capture only 1 video track until ended");
untilEndedElement.srcObject.getTracks().forEach(t => t.stop());
// TODO(1208316) We stop the stream to make the media element end.
untilEndedElement.srcObject.stop();
// Ending the element must also end every until-ended captured track.
return Promise.all([
haveEvent(untilEndedElement, "ended", wait(5000, new Error("Timeout"))),
...streamUntilEnded.getTracks()
.map(t => haveEvent(t, "ended", wait(5000, new Error("Timeout"))))
]);
})
.then(() => {
info("Element and tracks ended. Ensuring that new tracks aren't created.");
// After "until ended" has ended, a new source must not repopulate it.
untilEndedElement.srcObject = videoElement.srcObject;
return haveEventsButNoMore(
untilEndedElement, "loadedmetadata", 1, wait(5000, new Error("Timeout")));
})
.then(() => is(streamUntilEnded.getTracks().length, 4,
"Should still have 4 tracks"))
.catch(e => ok(false, "Test failed: " + e + (e && e.stack ? "\n" + e.stack : "")))
// Cleanup: stop every track we created so devices are released.
.then(() => [...tracks,
...videoElement.srcObject.getTracks()].forEach(t => t.stop())));
</script>
</pre>
</body>
</html>

Просмотреть файл

@ -0,0 +1,119 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
<script type="application/javascript" src="mediaStreamPlayback.js"></script>
<script type="application/javascript" src="head.js"></script>
</head>
<body>
<pre id="test">
<script>
createHTML({
bug: "1259788",
title: "Test CaptureStream video content on HTMLMediaElement playing a gUM MediaStream",
visible: true
});
// Element playing the gUM stream, and the element playing its capture.
var gUMVideoElement;
var captureStreamElement;
// We check a pixel somewhere away from the top left corner since
// MediaEngineDefault puts semi-transparent time indicators there.
const offsetX = 20;
const offsetY = 20;
// Per-channel tolerance when comparing pixel values.
const threshold = 16;
// How long (ms) a paused video's pixel must stay unchanged to count as paused.
const pausedTimeout = 1000;
const h = new CaptureStreamTestHelper2D(50, 50);
// Resolves once the probed pixel is opaque and not black, i.e. the element
// has rendered at least one real video frame.
var checkHasFrame = video => h.waitForPixel(video, offsetX, offsetY, px => {
let result = h.isOpaquePixelNot(px, h.black, threshold);
info("Checking that we have a frame, got [" +
Array.slice(px) + "]. Pass=" + result);
return result;
});
// Resolves once the probed pixel changes from its initial value, proving
// that frames are flowing (the fake video source animates over time).
var checkVideoPlaying = video => checkHasFrame(video)
.then(() => {
let startPixel = { data: h.getPixel(video, offsetX, offsetY)
, name: "startcolor"
};
return h.waitForPixel(video, offsetX, offsetY, px => {
// Fixed: statement previously relied on ASI; terminate it explicitly.
let result = h.isPixelNot(px, startPixel, threshold);
info("Checking playing, [" + Array.slice(px) + "] vs [" +
Array.slice(startPixel.data) + "]. Pass=" + result);
return result;
});
});
// Resolves once the probed pixel has stayed unchanged for `pausedTimeout`,
// proving that no new frames are being rendered.
var checkVideoPaused = video => checkHasFrame(video)
.then(() => {
let startPixel = { data: h.getPixel(video, offsetX, offsetY)
, name: "startcolor"
};
return h.waitForPixel(video, offsetX, offsetY, px => {
let result = h.isOpaquePixelNot(px, startPixel, threshold);
info("Checking paused, [" + Array.slice(px) + "] vs [" +
Array.slice(startPixel.data) + "]. Pass=" + result);
return result;
}, pausedTimeout);
}).then(result => ok(!result, "Frame shouldn't change within " + pausedTimeout / 1000 + " seconds."));
// Play a fake gUM video stream through a <video> element, capture it with
// mozCaptureStream() into a second element, and verify that the captured
// video is rendered/frozen exactly when the source element plays/pauses,
// including across source removal, re-attachment, and track manipulation.
runTest(() => getUserMedia({video: true, fake: true})
.then(stream => {
gUMVideoElement =
createMediaElement("gUMVideo", "local", "gUMVideo", false);
gUMVideoElement.srcObject = stream;
gUMVideoElement.play();
info("Capturing");
captureStreamElement =
createMediaElement("captureStream", "local", "captureStream", false);
captureStreamElement.srcObject = gUMVideoElement.mozCaptureStream();
captureStreamElement.play();
return checkVideoPlaying(captureStreamElement);
})
.then(() => {
info("Video flowing. Pausing.");
// Pausing the source element must freeze the captured output.
gUMVideoElement.pause();
return checkVideoPaused(captureStreamElement);
})
.then(() => {
info("Video stopped flowing. Playing.");
gUMVideoElement.play();
return checkVideoPlaying(captureStreamElement);
})
.then(() => {
info("Video flowing. Removing source.");
// Detach the source; keep a reference so it can be re-attached below.
var stream = gUMVideoElement.srcObject;
gUMVideoElement.srcObject = null;
return checkVideoPaused(captureStreamElement).then(() => stream);
})
.then(stream => {
info("Video stopped flowing. Setting source.");
gUMVideoElement.srcObject = stream;
return checkVideoPlaying(captureStreamElement);
})
.then(() => {
info("Video flowing. Changing source by track manipulation. Remove first.");
// Removing the only video track must also freeze the capture.
var track = gUMVideoElement.srcObject.getTracks()[0];
gUMVideoElement.srcObject.removeTrack(track);
return checkVideoPaused(captureStreamElement).then(() => track);
})
.then(track => {
info("Video paused. Changing source by track manipulation. Add first.");
// Re-adding the track (and playing) must resume the captured video.
gUMVideoElement.srcObject.addTrack(track);
gUMVideoElement.play();
return checkVideoPlaying(captureStreamElement);
})
.then(() => ok(true, "Test passed."))
.catch(e => ok(false, "Test failed: " + e + (e.stack ? "\n" + e.stack : ""))));
</script>
</pre>
</body>
</html>

Просмотреть файл

@ -32,7 +32,6 @@ runNetworkTest(function (options) {
test.pcLocal.removeSender(videoSenderIndex);
test.pcLocal.attachLocalTrack(stream.getTracks()[0], localStream);
let onNextLoop = wait(0);
eventsPromise = haveEvent(remoteStream, "addtrack", wait(50000, "No addtrack event"))
.then(trackEvent => {
ok(trackEvent instanceof MediaStreamTrackEvent,
@ -43,10 +42,7 @@ runNetworkTest(function (options) {
is(trackEvent.track.readyState, "live",
"added track should be live");
})
.then(() => haveEvent(remoteStream, "addtrack", onNextLoop)
.then(() => Promise.reject("Unexpected addtrack event for remote stream " + remoteStream.id),
() => Promise.resolve())
);
.then(() => haveNoEvent(remoteStream, "addtrack"));
remoteStream.addEventListener("removetrack",
function onRemovetrack(trackEvent) {
ok(false, "UA shouldn't raise 'removetrack' when receiving peer connection");

Просмотреть файл

@ -366,8 +366,8 @@ AudioContext::CreateMediaElementSource(HTMLMediaElement& aMediaElement,
return nullptr;
}
RefPtr<DOMMediaStream> stream = aMediaElement.MozCaptureStream(aRv,
mDestination->Stream()->Graph());
RefPtr<DOMMediaStream> stream =
aMediaElement.CaptureAudio(aRv, mDestination->Stream()->Graph());
if (aRv.Failed()) {
return nullptr;
}

Просмотреть файл

@ -524,7 +524,9 @@ AudioDestinationNode::WindowSuspendChanged(nsSuspendedTypes aSuspend)
NS_LITERAL_STRING("mozinterruptend") :
NS_LITERAL_STRING("mozinterruptbegin"));
mStream->SetTrackEnabled(AudioNodeStream::AUDIO_TRACK, !suspended);
DisabledTrackMode disabledMode = suspended ? DisabledTrackMode::SILENCE_BLACK
: DisabledTrackMode::ENABLED;
mStream->SetTrackEnabled(AudioNodeStream::AUDIO_TRACK, disabledMode);
return NS_OK;
}

Просмотреть файл

@ -582,7 +582,7 @@ AudioNodeStream::ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags)
}
}
if (mDisabledTrackIDs.Contains(static_cast<TrackID>(AUDIO_TRACK))) {
if (GetDisabledTrackMode(static_cast<TrackID>(AUDIO_TRACK)) != DisabledTrackMode::ENABLED) {
for (uint32_t i = 0; i < outputCount; ++i) {
mLastChunks[i].SetNull(WEBAUDIO_BLOCK_SIZE);
}
@ -621,7 +621,7 @@ AudioNodeStream::ProduceOutputBeforeInput(GraphTime aFrom)
mEngine->ProduceBlockBeforeInput(this, aFrom, &mLastChunks[0]);
NS_ASSERTION(mLastChunks[0].GetDuration() == WEBAUDIO_BLOCK_SIZE,
"Invalid WebAudio chunk size");
if (mDisabledTrackIDs.Contains(static_cast<TrackID>(AUDIO_TRACK))) {
if (GetDisabledTrackMode(static_cast<TrackID>(AUDIO_TRACK)) != DisabledTrackMode::ENABLED) {
mLastChunks[0].SetNull(WEBAUDIO_BLOCK_SIZE);
}
}

Просмотреть файл

@ -39,9 +39,11 @@ MediaStreamAudioDestinationNode::MediaStreamAudioDestinationNode(AudioContext* a
RefPtr<MediaStreamTrackSource> source =
new BasicUnstoppableTrackSource(doc->NodePrincipal(),
MediaSourceEnum::AudioCapture);
mDOMStream->CreateDOMTrack(AudioNodeStream::AUDIO_TRACK,
MediaSegment::AUDIO, source,
MediaTrackConstraints());
RefPtr<MediaStreamTrack> track =
mDOMStream->CreateDOMTrack(AudioNodeStream::AUDIO_TRACK,
MediaSegment::AUDIO, source,
MediaTrackConstraints());
mDOMStream->AddTrackInternal(track);
ProcessedMediaStream* outputStream = mDOMStream->GetInputStream()->AsProcessedStream();
MOZ_ASSERT(!!outputStream);

Просмотреть файл

@ -60,7 +60,11 @@ MediaStreamAudioSourceNode::Create(AudioContext* aContext,
void
MediaStreamAudioSourceNode::Init(DOMMediaStream* aMediaStream, ErrorResult& aRv)
{
MOZ_ASSERT(aMediaStream);
if (!aMediaStream) {
aRv.Throw(NS_ERROR_FAILURE);
return;
}
MediaStream* inputStream = aMediaStream->GetPlaybackStream();
MediaStreamGraph* graph = Context()->Graph();
if (NS_WARN_IF(graph != inputStream->Graph())) {

Просмотреть файл

@ -13,13 +13,12 @@
enum AnimationPlayState { "idle", "pending", "running", "paused", "finished" };
[Func="nsDocument::IsElementAnimateEnabled",
Constructor (optional KeyframeEffectReadOnly? effect = null,
Constructor (optional AnimationEffectReadOnly? effect = null,
optional AnimationTimeline? timeline)]
interface Animation : EventTarget {
attribute DOMString id;
// Bug 1049975: Make 'effect' writeable
[Func="nsDocument::IsWebAnimationsEnabled", Pure]
readonly attribute AnimationEffectReadOnly? effect;
attribute AnimationEffectReadOnly? effect;
[Func="nsDocument::IsWebAnimationsEnabled"]
attribute AnimationTimeline? timeline;
[BinaryName="startTimeAsDouble"]

Просмотреть файл

@ -651,7 +651,7 @@ SampleAnimations(Layer* aLayer, TimeStamp aPoint)
animation.easingFunction());
ComputedTiming computedTiming =
dom::KeyframeEffectReadOnly::GetComputedTimingAt(
dom::AnimationEffectReadOnly::GetComputedTimingAt(
Nullable<TimeDuration>(elapsedDuration), timing,
animation.playbackRate());
@ -685,6 +685,7 @@ SampleAnimations(Layer* aLayer, TimeStamp aPoint)
case eCSSProperty_opacity:
{
layerComposite->SetShadowOpacity(interpolatedValue.get_float());
layerComposite->SetShadowOpacitySetByAnimation(true);
break;
}
case eCSSProperty_transform:

Просмотреть файл

@ -1333,6 +1333,7 @@ LayerComposite::LayerComposite(LayerManagerComposite *aManager)
, mCompositor(aManager->GetCompositor())
, mShadowOpacity(1.0)
, mShadowTransformSetByAnimation(false)
, mShadowOpacitySetByAnimation(false)
, mDestroyed(false)
, mLayerComposited(false)
{ }

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше