MozReview-Commit-ID: BOSPzTn48B2
This commit is contained in:
Wes Kocher 2017-04-26 15:34:31 -07:00
Parents ce5ccb6a8c 058d8d941c
Commit 7954a3d443
216 changed files with 5067 additions and 2956 deletions

View File

@ -12,10 +12,9 @@ skip-if = (os == "linux" && debug) # linux: bug 976544
[browser_devices_get_user_media_multi_process.js]
skip-if = e10s && (asan || debug) # bug 1347625
[browser_devices_get_user_media_screen.js]
skip-if = (os == "linux") || (os == "win" && !debug) # bug 1320994 for linux opt, bug 1338038 for windows and linux debug
[browser_devices_get_user_media_tear_off_tab.js]
[browser_devices_get_user_media_unprompted_access.js]
[browser_devices_get_user_media_unprompted_access_in_frame.js]
[browser_devices_get_user_media_unprompted_access_tear_off_tab.js]
skip-if = (os == "linux") || (os == "win" && bits == 64) # linux: bug 1331616, win8: bug 1334752
skip-if = (os == "win" && bits == 64) # win8: bug 1334752
[browser_webrtc_hooks.js]

View File

@ -410,7 +410,7 @@ var gTests = [
yield check({video: true, audio: true});
info("Stop the camera, everything should stop.");
yield stopSharing("camera", false, true);
yield stopSharing("camera");
info("Now, share only the screen...");
indicator = promiseIndicatorWindow();
@ -423,7 +423,7 @@ var gTests = [
yield check({video: true, audio: true, screen: "Screen"});
info("Stop the camera, this should stop everything.");
yield stopSharing("camera", false, true);
yield stopSharing("camera");
}
},

View File

@ -62,7 +62,7 @@ var gTests = [
SitePermissions.remove(null, "microphone", gBrowser.selectedBrowser);
// After closing all streams, gUM(audio+camera) causes a prompt.
yield closeStream(false, 0, 2);
yield closeStream();
promise = promisePopupNotificationShown("webRTC-shareDevices");
yield promiseRequestDevice(true, true);
yield promise;
@ -169,7 +169,7 @@ var gTests = [
yield checkSharingUI({audio: false, video: true});
// close all streams
yield closeStream(false, 0, 2);
yield closeStream();
}
},
@ -241,7 +241,7 @@ var gTests = [
yield checkSharingUI({audio: true, video: false});
// close all streams
yield closeStream(false, 0, 2);
yield closeStream();
}
}

View File

@ -57,7 +57,7 @@ var gTests = [
yield expectObserverCalled("recording-device-events");
// close the stream
yield closeStream(false, "frame1", 2);
yield closeStream(false, "frame1");
}
},
@ -197,7 +197,7 @@ var gTests = [
yield expectObserverCalled("recording-window-ended");
// close the stream
yield closeStream(false);
yield closeStream();
SitePermissions.remove(null, "screen", gBrowser.selectedBrowser);
SitePermissions.remove(null, "camera", gBrowser.selectedBrowser);
SitePermissions.remove(null, "microphone", gBrowser.selectedBrowser);

View File

@ -41,7 +41,6 @@ var gTests = [
yield Promise.all(promises);
promises = [promiseObserverCalled("recording-device-events"),
promiseObserverCalled("recording-device-events"),
promiseObserverCalled("recording-window-ended")];
yield BrowserTestUtils.closeWindow(win);
yield Promise.all(promises);

View File

@ -215,7 +215,7 @@ function expectObserverCalled(aTopic) {
});
}
function expectNoObserverCalled(aIgnoreDeviceEvents = false) {
function expectNoObserverCalled() {
return new Promise(resolve => {
let mm = _mm();
mm.addMessageListener("Test:ExpectNoObserverCalled:Reply",
@ -225,15 +225,7 @@ function expectNoObserverCalled(aIgnoreDeviceEvents = false) {
if (!data[topic])
continue;
// If we are stopping tracks that were created from 2 different
// getUserMedia calls, the "recording-device-events" notification is
// fired twice on Windows and Mac, and intermittently twice on Linux.
if (topic == "recording-device-events" && aIgnoreDeviceEvents) {
todo(false, "Got " + data[topic] + " unexpected " + topic +
" notifications, see bug 1320994");
} else {
is(data[topic], 0, topic + " notification unexpected");
}
is(data[topic], 0, topic + " notification unexpected");
}
resolve();
});
@ -354,8 +346,7 @@ function getMediaCaptureState() {
});
}
function* stopSharing(aType = "camera", aShouldKeepSharing = false,
aExpectDoubleRecordingEvent = false) {
function* stopSharing(aType = "camera", aShouldKeepSharing = false) {
let promiseRecordingEvent = promiseObserverCalled("recording-device-events");
gIdentityHandler._identityBox.click();
let permissions = document.getElementById("identity-popup-permission-list");
@ -372,7 +363,7 @@ function* stopSharing(aType = "camera", aShouldKeepSharing = false,
if (!aShouldKeepSharing)
yield expectObserverCalled("recording-window-ended");
yield expectNoObserverCalled(aExpectDoubleRecordingEvent);
yield expectNoObserverCalled();
if (!aShouldKeepSharing)
yield* checkNotSharing();
@ -391,16 +382,13 @@ function promiseRequestDevice(aRequestAudio, aRequestVideo, aFrameId, aType,
});
}
function* closeStream(aAlreadyClosed, aFrameId, aStreamCount = 1) {
function* closeStream(aAlreadyClosed, aFrameId) {
yield expectNoObserverCalled();
let promises;
if (!aAlreadyClosed) {
promises = [];
for (let i = 0; i < aStreamCount; i++) {
promises.push(promiseObserverCalled("recording-device-events"));
}
promises.push(promiseObserverCalled("recording-window-ended"));
promises = [promiseObserverCalled("recording-device-events"),
promiseObserverCalled("recording-window-ended")];
}
info("closing the stream");

View File

@ -3334,7 +3334,7 @@ var SessionStoreInternal = {
}
}
if (selectTab > 0) {
if (selectTab > 0 && selectTab <= tabs.length) {
// The state we're restoring wants to select a particular tab. This
// implies that we're overwriting tabs.
let currentIndex = tabbrowser.tabContainer.selectedIndex;
@ -4404,8 +4404,8 @@ var SessionStoreInternal = {
if (tIndex + 1 < window.selected)
window.selected -= 1;
else if (tIndex + 1 == window.selected)
pinnedWindowState.selected = pinnedWindowState.tabs.length + 2;
// + 2 because the tab isn't actually in the array yet
pinnedWindowState.selected = pinnedWindowState.tabs.length + 1;
// + 1 because the tab isn't actually in the array yet
// Now add the pinned tab to our window
pinnedWindowState.tabs =
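A worked JavaScript sketch of the corrected off-by-one above: "selected" is 1-based and the pinned tab is appended right after this assignment, so the right value is the current length plus one (values are made up):

// Illustrative only.
let pinnedWindowState = { tabs: [{}, {}], selected: 1 };        // two tabs restored so far
pinnedWindowState.selected = pinnedWindowState.tabs.length + 1; // 3: the slot the pinned tab will occupy
pinnedWindowState.tabs = pinnedWindowState.tabs.concat([{ pinned: true }]);
console.log(pinnedWindowState.selected === pinnedWindowState.tabs.length); // true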

View File

@ -55,13 +55,12 @@ nsChromeRegistry::LogMessage(const char* aMsg, ...)
va_list args;
va_start(args, aMsg);
char* formatted = mozilla::Vsmprintf(aMsg, args);
mozilla::SmprintfPointer formatted = mozilla::Vsmprintf(aMsg, args);
va_end(args);
if (!formatted)
return;
console->LogStringMessage(NS_ConvertUTF8toUTF16(formatted).get());
mozilla::SmprintfFree(formatted);
console->LogStringMessage(NS_ConvertUTF8toUTF16(formatted.get()).get());
}
void
@ -80,7 +79,7 @@ nsChromeRegistry::LogMessageWithContext(nsIURI* aURL, uint32_t aLineNumber, uint
va_list args;
va_start(args, aMsg);
char* formatted = mozilla::Vsmprintf(aMsg, args);
mozilla::SmprintfPointer formatted = mozilla::Vsmprintf(aMsg, args);
va_end(args);
if (!formatted)
return;
@ -89,11 +88,10 @@ nsChromeRegistry::LogMessageWithContext(nsIURI* aURL, uint32_t aLineNumber, uint
if (aURL)
aURL->GetSpec(spec);
rv = error->Init(NS_ConvertUTF8toUTF16(formatted),
rv = error->Init(NS_ConvertUTF8toUTF16(formatted.get()),
NS_ConvertUTF8toUTF16(spec),
EmptyString(),
aLineNumber, 0, flags, "chrome registration");
mozilla::SmprintfFree(formatted);
if (NS_FAILED(rv))
return;

View File

@ -5,6 +5,9 @@
"use strict";
// Toggling the toolbox three times can take more than 45s on slow test machines
requestLongerTimeout(2);
// Test toggling the toolbox quickly and see if there is any race breaking it.
const URL = "data:text/html;charset=utf-8,Toggling devtools quickly";

View File

@ -10,6 +10,9 @@
*/
const DevTools = {
chromeWindowType: "navigator:browser",
getToolbox: function () {
return {};
}
};
exports.gDevTools = DevTools;

View File

@ -0,0 +1,596 @@
@import "chrome://devtools/skin/widgets.css";
@import "resource://devtools/client/themes/light-theme.css";
/* Webconsole specific theme variables */
.theme-light,
.theme-firebug {
--error-color: #FF0000;
--error-background-color: #FFEBEB;
--warning-background-color: #FFFFC8;
}
/* General output styles */
a {
-moz-user-focus: normal;
-moz-user-input: enabled;
cursor: pointer;
text-decoration: underline;
}
/* Workaround for Bug 575675 - FindChildWithRules aRelevantLinkVisited
* assertion when loading HTML page with links in XUL iframe */
*:visited { }
.webconsole-filterbar-wrapper {
flex-grow: 0;
}
.webconsole-filterbar-primary {
display: flex;
}
.devtools-toolbar.webconsole-filterbar-secondary {
height: initial;
}
.webconsole-filterbar-primary .devtools-plaininput {
flex: 1 1 100%;
}
.webconsole-output.hideTimestamps > .message > .timestamp {
display: none;
}
.message.startGroup .message-body > .objectBox-string,
.message.startGroupCollapsed .message-body > .objectBox-string {
color: var(--theme-body-color);
font-weight: bold;
}
.webconsole-output-wrapper .message > .icon {
margin: 3px 0 0 0;
padding: 0 0 0 6px;
}
.message.error > .icon::before {
background-position: -12px -36px;
}
.message.warn > .icon::before {
background-position: -24px -36px;
}
.message.info > .icon::before {
background-position: -36px -36px;
}
.message.network .method {
margin-inline-end: 5px;
}
.network .message-flex-body > .message-body {
display: flex;
}
.webconsole-output-wrapper .message .indent {
display: inline-block;
border-inline-end: solid 1px var(--theme-splitter-color);
}
.message.startGroup .indent,
.message.startGroupCollapsed .indent {
border-inline-end-color: transparent;
margin-inline-end: 5px;
}
.message.startGroup .icon,
.message.startGroupCollapsed .icon {
display: none;
}
/* console.table() */
.new-consoletable {
width: 100%;
border-collapse: collapse;
--consoletable-border: 1px solid var(--table-splitter-color);
}
.new-consoletable thead,
.new-consoletable tbody {
background-color: var(--theme-body-background);
}
.new-consoletable th {
background-color: var(--theme-selection-background);
color: var(--theme-selection-color);
margin: 0;
padding: 5px 0 0;
font-weight: inherit;
border-inline-end: var(--consoletable-border);
border-bottom: var(--consoletable-border);
}
.new-consoletable tr:nth-of-type(even) {
background-color: var(--table-zebra-background);
}
.new-consoletable td {
padding: 3px 4px;
min-width: 100px;
-moz-user-focus: normal;
color: var(--theme-body-color);
border-inline-end: var(--consoletable-border);
height: 1.25em;
line-height: 1.25em;
}
/* Layout */
.webconsole-output {
flex: 1;
direction: ltr;
overflow: auto;
-moz-user-select: text;
position: relative;
}
:root,
body,
#app-wrapper {
height: 100%;
margin: 0;
padding: 0;
}
body {
overflow: hidden;
}
#app-wrapper {
display: flex;
flex-direction: column;
}
:root, body {
margin: 0;
padding: 0;
height: 100%;
}
#app-wrapper {
height: 100%;
display: flex;
flex-direction: column;
}
#left-wrapper {
flex: 1;
display: flex;
flex-direction: column;
}
#output-container {
flex: 1;
overflow: hidden;
}
.webconsole-output-wrapper {
display: flex;
flex-direction: column;
height: 100%;
}
.message {
display: flex;
padding: 0 7px;
width: 100%;
box-sizing: border-box;
}
.message > .prefix,
.message > .timestamp {
flex: none;
color: var(--theme-comment);
margin: 3px 6px 0 0;
}
.message > .indent {
flex: none;
}
.message > .icon {
flex: none;
margin: 3px 6px 0 0;
padding: 0 4px;
height: 1em;
align-self: flex-start;
}
.theme-firebug .message > .icon {
margin: 0;
margin-inline-end: 6px;
}
.theme-firebug .message[severity="error"],
.theme-light .message.error,
.theme-firebug .message.error {
color: var(--error-color);
background-color: var(--error-background-color);
}
.theme-firebug .message[severity="warn"],
.theme-light .message.warn,
.theme-firebug .message.warn {
background-color: var(--warning-background-color);
}
.message > .icon::before {
content: "";
background-image: url(chrome://devtools/skin/images/webconsole.svg);
background-position: 12px 12px;
background-repeat: no-repeat;
background-size: 72px 60px;
width: 12px;
height: 12px;
display: inline-block;
}
.theme-light .message > .icon::before {
background-image: url(chrome://devtools/skin/images/webconsole.svg#light-icons);
}
.message > .message-body-wrapper {
flex: auto;
min-width: 0px;
margin: 3px;
}
/* The red bubble that shows the number of times a message is repeated */
.message-repeats {
-moz-user-select: none;
flex: none;
margin: 2px 6px;
padding: 0 6px;
height: 1.25em;
color: white;
background-color: red;
border-radius: 40px;
font: message-box;
font-size: 0.9em;
font-weight: 600;
}
.message-repeats[value="1"] {
display: none;
}
.message-location {
max-width: 40%;
}
.stack-trace {
/* The markup contains extra whitespace to improve formatting of clipboard text.
Make sure this whitespace doesn't affect the HTML rendering */
white-space: normal;
}
.stack-trace .frame-link-source,
.message-location .frame-link-source {
/* Makes the file name truncated (and ellipsis shown) on the left side */
direction: rtl;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.stack-trace .frame-link-source-inner,
.message-location .frame-link-source-inner {
/* Enforce LTR direction for the file name - fixes bug 1290056 */
direction: ltr;
unicode-bidi: embed;
}
.stack-trace .frame-link-function-display-name {
max-width: 50%;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.message-flex-body {
display: flex;
}
.message-body > * {
white-space: pre-wrap;
word-wrap: break-word;
}
.message-flex-body > .message-body {
display: block;
flex: auto;
}
#output-container.hideTimestamps > .message {
padding-inline-start: 0;
margin-inline-start: 7px;
width: calc(100% - 7px);
}
#output-container.hideTimestamps > .message > .timestamp {
display: none;
}
#output-container.hideTimestamps > .message > .indent {
background-color: var(--theme-body-background);
}
.message:hover {
background-color: var(--theme-selection-background-semitransparent) !important;
}
.theme-light .message.error {
background-color: rgba(255, 150, 150, 0.3);
}
.theme-dark .message.error {
background-color: rgba(235, 83, 104, 0.17);
}
.console-string {
color: var(--theme-highlight-lightorange);
}
.theme-selected .console-string,
.theme-selected .cm-number,
.theme-selected .cm-variable,
.theme-selected .kind-ArrayLike {
color: #f5f7fa !important; /* Selection Text Color */
}
.message.network.error > .icon::before {
background-position: -12px 0;
}
.message.network > .message-body {
display: flex;
flex-wrap: wrap;
}
.message.network .method {
flex: none;
}
.message.network:not(.navigation-marker) .url {
flex: 1 1 auto;
/* Make sure the URL is very small initially, let flex change width as needed. */
width: 100px;
min-width: 5em;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.message.network .status {
flex: none;
margin-inline-start: 6px;
}
.message.network.mixed-content .url {
color: var(--theme-highlight-red);
}
.message .learn-more-link {
color: var(--theme-highlight-blue);
margin: 0 6px;
}
.message.network .xhr {
background-color: var(--theme-body-color-alt);
color: var(--theme-body-background);
border-radius: 3px;
font-weight: bold;
font-size: 10px;
padding: 2px;
line-height: 10px;
margin-inline-start: 3px;
margin-inline-end: 1ex;
}
.message.cssparser > .indent {
border-inline-end: solid #00b6f0 6px;
}
.message.cssparser.error > .icon::before {
background-position: -12px -12px;
}
.message.cssparser.warn > .icon::before {
background-position: -24px -12px;
}
.message.exception > .indent {
border-inline-end: solid #fb9500 6px;
}
.message.exception.error > .icon::before {
background-position: -12px -24px;
}
.message.exception.warn > .icon::before {
background-position: -24px -24px;
}
.message.console-api > .indent {
border-inline-end: solid #cbcbcb 6px;
}
.message.server > .indent {
border-inline-end: solid #90B090 6px;
}
/* Input and output styles */
.message.command > .indent,
.message.result > .indent {
border-inline-end: solid #808080 6px;
}
.message.command > .icon::before {
background-position: -48px -36px;
}
.message.result > .icon::before {
background-position: -60px -36px;
}
/* JSTerm Styles */
#jsterm-wrapper {
flex: 0;
}
.jsterm-input-container {
background-color: var(--theme-tab-toolbar-background);
border-top: 1px solid var(--theme-splitter-color);
}
.theme-light .jsterm-input-container {
/* For light theme use a white background for the input - it looks better
than off-white */
background-color: #fff;
border-top-color: #e0e0e0;
}
.theme-firebug .jsterm-input-container {
border-top: 1px solid #ccc;
}
.jsterm-input-node,
.jsterm-complete-node {
border: none;
padding: 0;
padding-inline-start: 20px;
margin: 0;
-moz-appearance: none; appearance: none;
background-color: transparent;
}
.jsterm-input-node[focused="true"] {
background-image: var(--theme-command-line-image-focus);
box-shadow: none;
}
.jsterm-complete-node {
color: var(--theme-comment);
}
.jsterm-input-node {
/* Always allow scrolling on input - it auto expands in js by setting height,
but don't want it to get bigger than the window. 24px = toolbar height. */
max-height: calc(90vh - 24px);
background-image: var(--theme-command-line-image);
background-repeat: no-repeat;
background-size: 16px 16px;
background-position: 4px 50%;
color: var(--theme-content-color1);
}
:-moz-any(.jsterm-input-node,
.jsterm-complete-node) > .textbox-input-box > .textbox-textarea {
overflow-x: hidden;
/* Set padding for console input on textbox to make sure it is included in
scrollHeight that is used when resizing JSTerminal's input. Note: the textbox
default style already uses !important */
padding: 4px 0 !important;
}
#webconsole-notificationbox,
.jsterm-stack-node {
width: 100%;
}
.message.security > .indent {
border-inline-end: solid red 6px;
}
.message.security.error > .icon::before {
background-position: -12px -48px;
}
.message.security.warn > .icon::before {
background-position: -24px -48px;
}
.navigation-marker {
color: #aaa;
background: linear-gradient(#aaa, #aaa) no-repeat left 50%;
background-size: 100% 2px;
margin-top: 6px;
margin-bottom: 6px;
font-size: 0.9em;
}
.navigation-marker .url {
padding-inline-end: 9px;
text-decoration: none;
background: var(--theme-body-background);
}
.theme-light .navigation-marker .url {
background: #fff;
}
.stacktrace {
display: none;
padding: 5px 10px;
margin: 5px 0 0 0;
overflow-y: auto;
border: 1px solid var(--theme-splitter-color);
border-radius: 3px;
}
.theme-light .message.error .stacktrace {
background-color: rgba(255, 255, 255, 0.5);
}
.theme-dark .message.error .stacktrace {
background-color: rgba(0, 0, 0, 0.5);
}
.message.open .stacktrace {
display: block;
}
.message .theme-twisty {
display: inline-block;
vertical-align: middle;
margin: 3px 0 0 0;
flex-shrink: 0;
}
/* Do not mirror the twisty because the container forces ltr */
.message .theme-twisty:dir(rtl),
.message .theme-twisty:-moz-locale-dir(rtl) {
transform: none;
}
.cm-s-mozilla a[class] {
font-style: italic;
text-decoration: none;
}
.cm-s-mozilla a[class]:hover,
.cm-s-mozilla a[class]:focus {
text-decoration: underline;
}
a.learn-more-link.webconsole-learn-more-link {
font-style: normal;
}
/* Open DOMNode in inspector button */
.open-inspector {
background: url(chrome://devtools/skin/images/vview-open-inspector.png) no-repeat 0 0;
padding-left: 16px;
margin-left: 5px;
cursor: pointer;
}
.elementNode:hover .open-inspector,
.open-inspector:hover {
filter: url(images/filters.svg#checked-icon-state);
}
.elementNode:hover .open-inspector:active,
.open-inspector:active {
filter: url(images/filters.svg#checked-icon-state) brightness(0.9);
}

View File

@ -1,3 +1,7 @@
{
"presets": ["es2015"]
"env": {
"test": {
"presets": ["es2015"]
}
}
}
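Moving the preset under an "env"."test" key means Babel only applies es2015 when BABEL_ENV (or, failing that, NODE_ENV) equals "test", which the package.json test script in this commit sets via cross-env. A hedged node sketch of that selection; the required module path is hypothetical:

// Illustrative only: with NODE_ENV=test, babel-register activates the "test" env block.
process.env.NODE_ENV = "test";
require("babel-register");               // reads .babelrc and applies the es2015 preset
require("./new-console-output/store");   // hypothetical ES2015 module, transpiled on the fly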

View File

@ -0,0 +1,29 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/* eslint-env node */
"use strict";
const fs = require("fs");
const path = require("path");
function getConfig() {
if (process.env.TARGET === "firefox-panel") {
return require("../configs/firefox-panel.json");
}
const developmentConfig = require("../configs/development.json");
let localConfig = {};
if (fs.existsSync(path.resolve(__dirname, "../configs/local.json"))) {
localConfig = require("../configs/local.json");
}
return Object.assign({}, developmentConfig, localConfig);
}
module.exports = {
getConfig,
};
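A short usage sketch of getConfig, assuming it is called from another script in bin/ (configs/local.json is optional and simply overrides development.json):

// Illustrative only.
const { getConfig } = require("./configure");

const config = getConfig();
console.log(config.title);   // "Console" from development.json, unless local.json overrides it
// Setting TARGET=firefox-panel in the environment switches to firefox-panel.json entirely.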

View File

@ -0,0 +1,19 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/* eslint-env node */
"use strict";
const toolbox = require("devtools-launchpad/index");
const feature = require("devtools-config");
const { getConfig } = require("./configure");
const envConfig = getConfig();
feature.setConfig(envConfig);
let webpackConfig = require("../webpack.config");
toolbox.startDevServer(envConfig, webpackConfig, __dirname);

View File

@ -0,0 +1,35 @@
{
"title": "Console",
"environment": "development",
"baseWorkerURL": "http://localhost:8000/public/build/",
"host": "",
"theme": "light",
"dir": "ltr",
"features": {
},
"logging": {
"client": false,
"firefoxProxy": false,
"actions": false
},
"chrome": {
"debug": false,
"host": "localhost",
"port": 9222
},
"node": {
"debug": false,
"host": "localhost",
"port": 9229
},
"firefox": {
"webSocketConnection": false,
"proxyHost": "localhost:9000",
"webSocketHost": "localhost:6080",
"mcPath": "./firefox"
},
"development": {
"serverPort": 8000,
"examplesPort": 7999
}
}

View File

@ -0,0 +1,95 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/* eslint-env browser */
"use strict";
const React = require("react");
const ReactDOM = require("react-dom");
const { EventEmitter } = require("devtools-modules");
const { Services: { appinfo, pref } } = require("devtools-modules");
const { bootstrap } = require("devtools-launchpad");
EventEmitter.decorate(window);
require("../../themes/new-webconsole.css");
require("../../shared/components/reps/reps.css");
pref("devtools.debugger.remote-timeout", 10000);
pref("devtools.hud.loglimit", 1000);
pref("devtools.webconsole.filter.error", true);
pref("devtools.webconsole.filter.warn", true);
pref("devtools.webconsole.filter.info", true);
pref("devtools.webconsole.filter.log", true);
pref("devtools.webconsole.filter.debug", true);
pref("devtools.webconsole.filter.css", false);
pref("devtools.webconsole.filter.net", false);
pref("devtools.webconsole.filter.netxhr", false);
pref("devtools.webconsole.ui.filterbar", false);
pref("devtools.webconsole.inputHistoryCount", 50);
pref("devtools.webconsole.persistlog", false);
pref("devtools.webconsole.timestampMessages", false);
pref("devtools.webconsole.autoMultiline", true);
const NewConsoleOutputWrapper = require("../new-console-output/new-console-output-wrapper");
const NewWebConsoleFrame = require("../new-webconsole").NewWebConsoleFrame;
// Replicate the DOM that the root component lives within
const el = document.createElement("div");
el.style.flex = "1";
el.innerHTML = `
<div id="app-wrapper" class="theme-body">
<div id="output-container" role="document" aria-live="polite" />
</div>
`;
document.querySelector("#mount").appendChild(el);
document.documentElement.classList.add("theme-light");
// Copied from netmonitor/index.js:
window.addEventListener("DOMContentLoaded", () => {
for (let link of document.head.querySelectorAll("link")) {
link.href = link.href.replace(/(resource|chrome)\:\/\//, "/");
}
if (appinfo.OS === "Darwin") {
document.documentElement.setAttribute("platform", "mac");
} else if (appinfo.OS === "Linux") {
document.documentElement.setAttribute("platform", "linux");
} else {
document.documentElement.setAttribute("platform", "win");
}
});
let consoleFrame;
function onConnect(connection) {
// If we are on the main dashboard don't render the component
if (!connection || !connection.tabConnection || !connection.tabConnection.tabTarget) {
return;
}
// Stub out properties that are received from hudservice
const owner = {
iframeWindow: window,
chromeWindow: window,
hudId: "hud_0",
target: connection.tabConnection.tabTarget,
_browserConsole: false,
NewConsoleOutputWrapper,
};
consoleFrame = new NewWebConsoleFrame(owner);
consoleFrame.init().then(function () {
console.log("NewWebConsoleFrame initialized");
});
}
// This is just a hack until the local dev environment includes jsterm
window.evaluateJS = function (input) {
consoleFrame.webConsoleClient.evaluateJSAsync(`${input}`, function (r) {
consoleFrame.newConsoleOutput.dispatchMessageAdd(r);
}, {});
};
bootstrap(React, ReactDOM, el).then(onConnect);
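Once onConnect has created the frame, the evaluateJS hack above can be exercised from the browser's own console on the launchpad page; a hedged example:

// Illustrative only: evaluates in the connected tab and renders the result packet.
window.evaluateJS("2 + 2");
window.evaluateJS("document.title");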

View File

@ -0,0 +1,17 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
function JSTerm(webConsoleFrame) {
this.hud = webConsoleFrame;
this.hudId = this.hud.hudId;
this.historyLoaded = new Promise(r => {
r();
});
this.openVariablesView = () => { };
this.init = () => { };
}
module.exports.JSTerm = JSTerm;

View File

@ -7,11 +7,11 @@
"use strict";
const actionModules = [
"enhancers",
"filters",
"messages",
"ui",
].map(filename => require(`./${filename}`));
require("./enhancers"),
require("./filters"),
require("./messages"),
require("./ui"),
];
const actions = Object.assign({}, ...actionModules);

View File

@ -14,7 +14,7 @@
<script type="text/javascript">
"use strict";
const numMessages = 2000;
const numMessages = 4000;
const testPackets = Array.from({length: numMessages}).map((el, id) => ({
"from": "server1.conn4.child1/consoleActor2",
"type": "consoleAPICall",
@ -49,7 +49,12 @@ async function timeit(cb) {
}
window.onload = async function () {
// This test does costly work multiple times to have better performance data.
SimpleTest.requestLongerTimeout(3);
try {
const Services = browserRequire("Services");
Services.prefs.setIntPref("devtools.hud.loglimit", numMessages);
const NewConsoleOutputWrapper = browserRequire(
"devtools/client/webconsole/new-console-output/new-console-output-wrapper");
const EventEmitter = browserRequire("devtools/shared/event-emitter");
@ -64,25 +69,31 @@ window.onload = async function () {
wrapper.init();
let times = [];
const iterations = 10;
const iterations = 25;
const lastPacket = testPackets.pop();
for (let i = 0; i < iterations; i++) {
let time = await timeit(() => {
let time = await timeit(async () => {
testPackets.forEach((packet) => wrapper.dispatchMessageAdd(packet));
// Only wait for the last packet to minimize work.
return wrapper.dispatchMessageAdd(lastPacket, true);
await wrapper.dispatchMessageAdd(lastPacket, true);
await new Promise(resolve => requestAnimationFrame(resolve));
});
info(`took ${time} ms to render messages`);
times.push(time);
info("Clear the console");
await new Promise(resolve => requestAnimationFrame(() => resolve()));
// Clear the console
wrapper.dispatchMessagesClear();
await new Promise(resolve => requestAnimationFrame(resolve));
}
times.sort();
let totalTime = times.reduce((sum, t) => sum + t);
let avg = totalTime / times.length;
info(`On average, it took ${avg} ms to render ${numMessages} messages`);
let median = times.length % 2 !== 0
? times[Math.floor(times.length / 2)]
: (times[(times.length / 2) - 1] + times[times.length / 2]) / 2;
info(`On average, it took ${avg} ms (median ${median} ms) ` +
`to render ${numMessages} messages`);
ok(true, "Yay, it didn't time out!");
} catch (e) {
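A worked sketch of the median computed above. Note that Array.prototype.sort without a comparator sorts lexicographically, so a numeric comparator is assumed here:

// Illustrative only: median of render times in milliseconds.
const sampleTimes = [12.5, 9.1, 30.2, 11.0];
sampleTimes.sort((a, b) => a - b);                   // [9.1, 11.0, 12.5, 30.2]
const half = sampleTimes.length / 2;
const sampleMedian = sampleTimes.length % 2 !== 0
  ? sampleTimes[Math.floor(half)]
  : (sampleTimes[half - 1] + sampleTimes[half]) / 2;
console.log(sampleMedian);                           // 11.75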

View File

@ -6,7 +6,10 @@
const L10n = require("devtools/client/webconsole/new-console-output/test/fixtures/L10n");
const Utils = {
L10n
L10n,
supportsString: function (s) {
return s;
}
};
module.exports = {

View File

@ -0,0 +1,269 @@
/* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- */
/* vim: set ft=javascript ts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const {Utils: WebConsoleUtils} = require("devtools/client/webconsole/utils");
const EventEmitter = require("devtools/shared/event-emitter");
const promise = require("promise");
const defer = require("devtools/shared/defer");
const Services = require("Services");
const { gDevTools } = require("devtools/client/framework/devtools");
const { JSTerm } = require("devtools/client/webconsole/jsterm");
const { WebConsoleConnectionProxy } = require("devtools/client/webconsole/webconsole-connection-proxy");
const PREF_MESSAGE_TIMESTAMP = "devtools.webconsole.timestampMessages";
// XXX: This file is incomplete (see bug 1326937).
// It's used when loading the webconsole with devtools-launchpad, but will ultimately be
// the entry point for the new frontend
/**
* A WebConsoleFrame instance is an interactive console initialized *per target*
* that displays console log data as well as provides an interactive terminal to
* manipulate the target's document content.
*
* The WebConsoleFrame is responsible for the actual Web Console UI
* implementation.
*
* @constructor
* @param object webConsoleOwner
* The WebConsole owner object.
*/
function NewWebConsoleFrame(webConsoleOwner) {
this.owner = webConsoleOwner;
this.hudId = this.owner.hudId;
this.isBrowserConsole = this.owner._browserConsole;
this.NEW_CONSOLE_OUTPUT_ENABLED = true;
this.window = this.owner.iframeWindow;
this._onToolboxPrefChanged = this._onToolboxPrefChanged.bind(this);
EventEmitter.decorate(this);
}
NewWebConsoleFrame.prototype = {
/**
* Getter for the debugger WebConsoleClient.
* @type object
*/
get webConsoleClient() {
return this.proxy ? this.proxy.webConsoleClient : null;
},
/**
* Initialize the WebConsoleFrame instance.
* @return object
* A promise object that resolves once the frame is ready to use.
*/
init() {
this._initUI();
let connectionInited = this._initConnection();
// Don't reject if the history fails to load for some reason.
// This would be fine, the panel will just start with empty history.
let allReady = this.jsterm.historyLoaded.catch(() => {}).then(() => {
return connectionInited;
});
// This notification is only used in tests. Don't chain it onto
// the returned promise because the console panel needs to be attached
// to the toolbox before the web-console-created event is received.
let notifyObservers = () => {
let id = WebConsoleUtils.supportsString(this.hudId);
if (Services.obs) {
Services.obs.notifyObservers(id, "web-console-created");
}
};
allReady.then(notifyObservers, notifyObservers)
.then(this.newConsoleOutput.init);
return allReady;
},
destroy() {
if (this._destroyer) {
return this._destroyer.promise;
}
this._destroyer = defer();
Services.prefs.addObserver(PREF_MESSAGE_TIMESTAMP, this._onToolboxPrefChanged);
this.React = this.ReactDOM = this.FrameView = null;
let onDestroy = () => {
this._destroyer.resolve(null);
};
if (this.proxy) {
this.proxy.disconnect().then(onDestroy);
this.proxy = null;
} else {
onDestroy();
}
return this._destroyer.promise;
},
_onUpdateListeners() {
},
logWarningAboutReplacedAPI() {
},
/**
* Setter for saving of network request and response bodies.
*
* @param boolean value
* The new value you want to set.
*/
setSaveRequestAndResponseBodies: function (value) {
if (!this.webConsoleClient) {
// Don't continue if the webconsole disconnected.
return promise.resolve(null);
}
let deferred = defer();
let newValue = !!value;
let toSet = {
"NetworkMonitor.saveRequestAndResponseBodies": newValue,
};
// Make sure the web console client connection is established first.
this.webConsoleClient.setPreferences(toSet, response => {
if (!response.error) {
this._saveRequestAndResponseBodies = newValue;
deferred.resolve(response);
} else {
deferred.reject(response.error);
}
});
return deferred.promise;
},
/**
* Connect to the server using the remote debugging protocol.
*
* @private
* @return object
* A promise object that is resolved/reject based on the connection
* result.
*/
_initConnection: function () {
if (this._initDefer) {
return this._initDefer.promise;
}
this._initDefer = defer();
this.proxy = new WebConsoleConnectionProxy(this, this.owner.target);
this.proxy.connect().then(() => {
// on success
this._initDefer.resolve(this);
}, (reason) => {
// on failure
// TODO Print a message to console
this._initDefer.reject(reason);
});
return this._initDefer.promise;
},
_initUI: function () {
this.document = this.window.document;
this.rootElement = this.document.documentElement;
this.outputNode = this.document.getElementById("output-container");
this.completeNode = this.document.querySelector(".jsterm-complete-node");
this.inputNode = this.document.querySelector(".jsterm-input-node");
this.jsterm = new JSTerm(this);
this.jsterm.init();
let toolbox = gDevTools.getToolbox(this.owner.target);
// @TODO Remove this once JSTerm is handled with React/Redux.
this.window.jsterm = this.jsterm;
// @TODO Once the toolbox has been converted to React, see if passing
// in JSTerm is still necessary.
// Handle both launchpad and toolbox loading
let Wrapper = this.owner.NewConsoleOutputWrapper || this.window.NewConsoleOutput;
this.newConsoleOutput = new Wrapper(
this.outputNode, this.jsterm, toolbox, this.owner, this.document);
// Toggle the timestamp on preference change
Services.prefs.addObserver(PREF_MESSAGE_TIMESTAMP, this._onToolboxPrefChanged);
this._onToolboxPrefChanged();
},
/**
* Handler for page location changes.
*
* @param string uri
* New page location.
* @param string title
* New page title.
*/
onLocationChange: function (uri, title) {
this.contentLocation = uri;
if (this.owner.onLocationChange) {
this.owner.onLocationChange(uri, title);
}
},
/**
* Release an actor.
*
* @private
* @param string actor
* The actor ID you want to release.
*/
_releaseObject: function (actor) {
if (this.proxy) {
this.proxy.releaseActor(actor);
}
},
/**
* Called when the message timestamp pref changes.
*/
_onToolboxPrefChanged: function () {
let newValue = Services.prefs.getBoolPref(PREF_MESSAGE_TIMESTAMP);
this.newConsoleOutput.dispatchTimestampsToggle(newValue);
},
/**
* Handler for the tabNavigated notification.
*
* @param string event
* Event name.
* @param object packet
* Notification packet received from the server.
*/
handleTabNavigated: function (event, packet) {
if (event == "will-navigate") {
if (this.persistLog) {
// Add a _type to hit convertCachedPacket.
packet._type = true;
this.newConsoleOutput.dispatchMessageAdd(packet);
} else {
this.jsterm.clearOutput();
}
}
if (packet.url) {
this.onLocationChange(packet.url, packet.title);
}
if (event == "navigate" && !packet.nativeConsoleAPI) {
this.logWarningAboutReplacedAPI();
}
},
};
exports.NewWebConsoleFrame = NewWebConsoleFrame;
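A hedged sketch of how an embedder constructs this frame, mirroring local-dev/index.js from this commit: the owner fields are the ones the constructor and _initUI read, and the connection object is whatever devtools-launchpad passes to onConnect.

// Illustrative only: wiring NewWebConsoleFrame up inside a launchpad onConnect callback.
const { NewWebConsoleFrame } = require("../new-webconsole");
const NewConsoleOutputWrapper = require("../new-console-output/new-console-output-wrapper");

function onConnect(connection) {
  const owner = {
    iframeWindow: window,                      // must contain #output-container
    chromeWindow: window,
    hudId: "hud_0",
    target: connection.tabConnection.tabTarget,
    _browserConsole: false,
    NewConsoleOutputWrapper,
  };
  return new NewWebConsoleFrame(owner).init(); // resolves once the proxy is connected
}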

View File

@ -1,21 +1,35 @@
{
"name": "webconsole",
"version": "0.0.1",
"devDependencies": {
"amd-loader": "0.0.5",
"babel-preset-es2015": "^6.6.0",
"babel-register": "^6.7.2",
"cross-env": "^3.1.3",
"enzyme": "^2.4.1",
"expect": "^1.16.0",
"jsdom": "^9.4.1",
"jsdom-global": "^2.0.0",
"mocha": "^2.5.3",
"require-hacker": "^2.1.4",
"sinon": "^1.17.5"
"engines": {
"node": ">=6.9.0"
},
"scripts": {
"postinstall": "cd ../ && npm install && cd webconsole",
"test": "cross-env NODE_PATH=../../../ mocha new-console-output/test/**/*.test.js --compilers js:babel-register -r jsdom-global/register -r ./new-console-output/test/require-helper.js"
"start": "node bin/dev-server",
"test": "cross-env NODE_ENV=test NODE_PATH=../../../ mocha new-console-output/test/**/*.test.js --compilers js:babel-register -r jsdom-global/register -r ./new-console-output/test/require-helper.js"
},
"dependencies": {
"amd-loader": "0.0.5",
"babel-preset-es2015": "^6.6.0",
"babel-register": "^6.24.0",
"cross-env": "^3.1.3",
"devtools-config": "0.0.12",
"devtools-launchpad": "0.0.67",
"devtools-modules": "0.0.24",
"enzyme": "^2.4.1",
"expect": "^1.16.0",
"file-loader": "^0.10.1",
"immutable": "^3.8.1",
"jsdom": "^9.4.1",
"jsdom-global": "^2.0.0",
"json-loader": "^0.5.4",
"mocha": "^2.5.3",
"raw-loader": "^0.5.1",
"react": "=15.3.2",
"react-dom": "=15.3.2",
"react-redux": "=5.0.3",
"redux": "^3.6.0",
"require-hacker": "^2.1.4",
"sinon": "^1.17.5"
}
}

View File

@ -6,14 +6,8 @@
"use strict";
const {Cc, Ci, Cu} = require("chrome");
const {Utils: WebConsoleUtils} =
require("devtools/client/webconsole/utils");
const BrowserLoaderModule = {};
Cu.import("resource://devtools/client/shared/browser-loader.js", BrowserLoaderModule);
const promise = require("promise");
const {Utils: WebConsoleUtils} = require("devtools/client/webconsole/utils");
const defer = require("devtools/shared/defer");
const Services = require("Services");
const STRINGS_URI = "devtools/client/locales/webconsole.properties";
@ -127,18 +121,17 @@ WebConsoleConnectionProxy.prototype = {
return this._connectDefer.promise;
}
this._connectDefer = promise.defer();
this._connectDefer = defer();
let timeout = Services.prefs.getIntPref(PREF_CONNECTION_TIMEOUT);
this._connectTimer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
this._connectTimer.initWithCallback(this._connectionTimeout,
timeout, Ci.nsITimer.TYPE_ONE_SHOT);
this._connectTimer = setTimeout(this._connectionTimeout, timeout);
let connPromise = this._connectDefer.promise;
connPromise.then(() => {
this._connectTimer.cancel();
clearTimeout(this._connectTimer);
this._connectTimer = null;
}, () => {
clearTimeout(this._connectTimer);
this._connectTimer = null;
});
@ -475,7 +468,7 @@ WebConsoleConnectionProxy.prototype = {
return this._disconnecter.promise;
}
this._disconnecter = promise.defer();
this._disconnecter = defer();
if (!this.client) {
this._disconnecter.resolve(null);
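The nsITimer plumbing is replaced with plain setTimeout/clearTimeout so this file also runs outside XPCOM (for example under the launchpad bundle). A generic sketch of the timeout-guard pattern it uses, with assumed names:

// Illustrative only: reject if connect() does not settle within ms milliseconds.
function connectWithTimeout(connect, ms) {
  return new Promise((resolve, reject) => {
    const timer = setTimeout(() => reject(new Error("connection timed out")), ms);
    connect().then(
      result => { clearTimeout(timer); resolve(result); },
      error => { clearTimeout(timer); reject(error); });
  });
}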

View File

@ -0,0 +1,128 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/* eslint-env node */
/* eslint max-len: [0] */
"use strict";
const {toolboxConfig} = require("./node_modules/devtools-launchpad/index");
const { NormalModuleReplacementPlugin } = require("webpack");
const {getConfig} = require("./bin/configure");
const path = require("path");
const projectPath = path.join(__dirname, "local-dev");
let webpackConfig = {
entry: {
console: [path.join(projectPath, "index.js")],
},
module: {
loaders: [
{
test: /\.(png|svg)$/,
loader: "file-loader?name=[path][name].[ext]",
},
]
},
output: {
path: path.join(__dirname, "assets/build"),
filename: "[name].js",
publicPath: "/assets/build",
},
externals: [
{
"promise": "var Promise",
}
],
};
webpackConfig.resolve = {
alias: {
"Services": "devtools-modules/client/shared/shim/Services",
"devtools/client/webconsole/jsterm": path.join(projectPath, "jsterm-stub"),
"devtools/client/webconsole/utils": path.join(__dirname, "new-console-output/test/fixtures/WebConsoleUtils"),
"devtools/client/webconsole/new-console-output": path.join(__dirname, "new-console-output"),
"devtools/client/webconsole/webconsole-connection-proxy": path.join(__dirname, "webconsole-connection-proxy"),
"react": path.join(__dirname, "node_modules/react"),
"devtools/client/shared/vendor/immutable": "immutable",
"devtools/client/shared/vendor/react": "react",
"devtools/client/shared/vendor/react-dom": "react-dom",
"devtools/client/shared/vendor/react-redux": "react-redux",
"devtools/client/shared/vendor/redux": "redux",
"devtools/client/locales": path.join(__dirname, "../../client/locales/en-US"),
"toolkit/locales": path.join(__dirname, "../../../toolkit/locales/en-US"),
"devtools/shared/locales": path.join(__dirname, "../../shared/locales/en-US"),
"devtools/shared/plural-form": path.join(__dirname, "../../shared/plural-form"),
"devtools/shared/l10n": path.join(__dirname, "../../shared/l10n"),
"devtools/client/framework/devtools": path.join(__dirname, "../../client/shims/devtools"),
"devtools/client/framework/menu": "devtools-modules/client/framework/menu",
"devtools/client/framework/menu-item": path.join(__dirname, "../../client/framework/menu-item"),
"devtools/client/shared/components/reps/reps": path.join(__dirname, "../../client/shared/components/reps/reps"),
"devtools/client/shared/redux/middleware/thunk": path.join(__dirname, "../../client/shared/redux/middleware/thunk"),
"devtools/client/shared/components/stack-trace": path.join(__dirname, "../../client/shared/components/stack-trace"),
"devtools/client/shared/source-utils": path.join(__dirname, "../../client/shared/source-utils"),
"devtools/client/shared/components/frame": path.join(__dirname, "../../client/shared/components/frame"),
"devtools/shared/defer": path.join(__dirname, "../../shared/defer"),
"devtools/shared/event-emitter": "devtools-modules/shared/event-emitter",
"devtools/shared/client/main": path.join(__dirname, "new-console-output/test/fixtures/ObjectClient"),
"devtools/shared/platform/clipboard": path.join(__dirname, "../../shared/platform/content/clipboard"),
}
};
const mappings = [
[
/utils\/menu/, "devtools-launchpad/src/components/shared/menu"
],
[
/chrome:\/\/devtools\/skin/,
(result) => {
result.request = result.request
.replace("./chrome://devtools/skin", path.join(__dirname, "../themes"));
}
],
[
/chrome:\/\/devtools\/content/,
(result) => {
result.request = result.request
.replace("./chrome://devtools/content", path.join(__dirname, ".."));
}
],
[
/resource:\/\/devtools/,
(result) => {
result.request = result.request
.replace("./resource://devtools/client", path.join(__dirname, ".."));
}
],
];
webpackConfig.plugins = mappings.map(([regex, res]) =>
new NormalModuleReplacementPlugin(regex, res));
// Exclude all scripts under devtools/ from transpilation, except those in this folder
const basePath = path.join(__dirname, "../../").replace(/\\/g, "\\\\");
const baseName = path.basename(__dirname);
webpackConfig.babelExcludes = new RegExp(`^${basePath}(.(?!${baseName}))*$`);
let config = toolboxConfig(webpackConfig, getConfig());
// Remove loaders from devtools-launchpad's webpack.config.js
// * For the svg-inline loader:
//   the webconsole bundles image assets with the file loader instead of svg-inline
// * For the raw loader:
//   devtools/shared/l10n already preloads the raw loader via require.context
config.module.loaders = config.module.loaders
.filter((loader) => !["svg-inline", "raw"].includes(loader.loader));
module.exports = config;
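Illustrative effect of the alias table above when bundling for the launchpad; the resolved targets are taken from this config, and exact paths depend on the checkout:

// Illustrative only: what these in-tree-style requires resolve to under webpack.
require("devtools/client/shared/vendor/react");   // -> the npm "react" package
require("devtools/shared/event-emitter");         // -> devtools-modules/shared/event-emitter
require("devtools/client/webconsole/jsterm");     // -> ./local-dev/jsterm-stub.js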

View File

@ -0,0 +1,33 @@
<!doctype html>
<html class="reftest-wait">
<head>
<meta charset=utf-8>
<title>Bug 1359658: Animation-only dirty descendants bit should be cleared
for display:none content</title>
</head>
<body>
<div id="ancestor">
<svg>
<rect id="target" width="100%" height="100%" fill="lime"/>
</svg>
</div>
</body>
<script>
'use strict';
const ancestor = document.getElementById('ancestor');
const target = document.getElementById('target');
document.addEventListener('DOMContentLoaded', () => {
const animation = target.animate({ color: [ 'red', 'lime' ] },
{ duration: 1000, iterations: Infinity });
requestAnimationFrame(() => {
// Tweak animation to cause animation dirty bit to be set
animation.effect.timing.duration = 2000;
ancestor.style.display = "none";
getComputedStyle(ancestor).display;
document.documentElement.className = '';
});
});
</script>
</html>

View File

@ -26,3 +26,4 @@ pref(dom.animations-api.core.enabled,true) load 1333539-1.html
pref(dom.animations-api.core.enabled,true) load 1333539-2.html
pref(dom.animations-api.core.enabled,true) load 1333418-1.html
pref(dom.animations-api.core.enabled,true) load 1343589-1.html
pref(dom.animations-api.core.enabled,true) load 1359658-1.html

View File

@ -10271,4 +10271,29 @@ nsContentUtils::IsLocalRefURL(const nsString& aString)
}
return false;
}
}
// Tab ID is composed in a similar manner to Window ID.
static uint64_t gNextTabId = 0;
static const uint64_t kTabIdProcessBits = 32;
static const uint64_t kTabIdTabBits = 64 - kTabIdProcessBits;
/* static */ uint64_t
nsContentUtils::GenerateTabId()
{
uint64_t processId = 0;
if (XRE_IsContentProcess()) {
ContentChild* cc = ContentChild::GetSingleton();
processId = cc->GetID();
}
MOZ_RELEASE_ASSERT(processId < (uint64_t(1) << kTabIdProcessBits));
uint64_t processBits = processId & ((uint64_t(1) << kTabIdProcessBits) - 1);
uint64_t tabId = ++gNextTabId;
MOZ_RELEASE_ASSERT(tabId < (uint64_t(1) << kTabIdTabBits));
uint64_t tabBits = tabId & ((uint64_t(1) << kTabIdTabBits) - 1);
return (processBits << kTabIdTabBits) | tabBits;
}
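For illustration, the same packing expressed in JavaScript BigInts (the C++ above is authoritative): the upper 32 bits carry the content process ID and the lower 32 bits a per-process counter.

// Illustrative only.
const PROCESS_BITS = 32n;
const TAB_BITS = 64n - PROCESS_BITS;
let nextTabId = 0n;

function generateTabId(processId) {
  const processBits = BigInt(processId) & ((1n << PROCESS_BITS) - 1n);
  const tabBits = (++nextTabId) & ((1n << TAB_BITS) - 1n);
  return (processBits << TAB_BITS) | tabBits;
}

console.log(generateTabId(3).toString(16));   // "300000001"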

View File

@ -2873,6 +2873,11 @@ public:
static bool
IsCustomElementsEnabled() { return sIsCustomElementsEnabled; }
/**
* Compose a tab ID from the process ID and a serial number.
*/
static uint64_t GenerateTabId();
private:
static bool InitializeEventTable();

View File

@ -300,16 +300,29 @@ nsDOMTokenList::Toggle(const nsAString& aToken,
AutoTArray<nsString, 1> tokens;
(*tokens.AppendElement()).Rebind(aToken.Data(), aToken.Length());
if (isPresent) {
if (!forceOn) {
RemoveInternal(attr, tokens);
isPresent = false;
}
} else {
if (!forceOff) {
AddInternal(attr, tokens);
isPresent = true;
if (isPresent && !forceOn) {
RemoveInternal(attr, tokens);
return false;
}
if (!isPresent && !forceOff) {
AddInternal(attr, tokens);
return true;
}
if (attr) {
// Remove duplicates and whitespace from attr
RemoveDuplicates(attr);
nsAutoString resultStr;
for (uint32_t i = 0; i < attr->GetAtomCount(); i++) {
if (!resultStr.IsEmpty()) {
resultStr.AppendLiteral(" ");
}
resultStr.Append(nsDependentAtomString(attr->AtomAt(i)));
}
mElement->SetAttr(kNameSpaceID_None, mAttrAtom, resultStr, true);
}
return isPresent;
@ -375,9 +388,7 @@ nsDOMTokenList::ReplaceInternal(const nsAttrValue* aAttr,
resultStr.Append(nsDependentAtomString(aAttr->AtomAt(i)));
}
if (sawIt) {
mElement->SetAttr(kNameSpaceID_None, mAttrAtom, resultStr, true);
}
mElement->SetAttr(kNameSpaceID_None, mAttrAtom, resultStr, true);
}
bool
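This rewrite is web-observable through classList.toggle(); a hedged DOM sketch of the force-flag semantics it implements, including the new duplicate/whitespace cleanup on the kept-token path:

// Illustrative only (run in a DOM environment).
const el = document.createElement("div");
el.className = "a  a b";                  // duplicates and stray whitespace

el.classList.toggle("a");                 // present, no force  -> removed, returns false
el.classList.toggle("c", false);          // absent,  force off -> no-op,  returns false
el.classList.toggle("b", true);           // present, force on  -> kept,   returns true
console.log(el.className);                // "b": the attribute has been re-serialized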

View File

@ -627,7 +627,6 @@ GK_ATOM(mouseout, "mouseout")
GK_ATOM(mouseover, "mouseover")
GK_ATOM(mousethrough, "mousethrough")
GK_ATOM(mouseup, "mouseup")
GK_ATOM(mozaudiochannel, "mozaudiochannel")
GK_ATOM(mozfullscreenchange, "mozfullscreenchange")
GK_ATOM(mozfullscreenerror, "mozfullscreenerror")
GK_ATOM(mozpointerlockchange, "mozpointerlockchange")
@ -1959,10 +1958,6 @@ GK_ATOM(onuserproximity, "onuserproximity")
// light sensor support
GK_ATOM(ondevicelight, "ondevicelight")
// Audio channel events
GK_ATOM(onmozinterruptbegin, "onmozinterruptbegin")
GK_ATOM(onmozinterruptend, "onmozinterruptend")
// MediaDevices device change event
GK_ATOM(ondevicechange, "ondevicechange")

View File

@ -257,7 +257,7 @@ ToStringHelper(const char* aSeverity, const nsString& aMessage,
static const char format2[] =
"[%s: \"%s\"]";
char* temp;
UniqueChars temp;
char* tempMessage = nullptr;
char* tempSourceName = nullptr;
char* tempSourceLine = nullptr;
@ -301,8 +301,7 @@ ToStringHelper(const char* aSeverity, const nsString& aMessage,
if (!temp)
return NS_ERROR_OUT_OF_MEMORY;
aResult.Assign(temp);
JS_smprintf_free(temp);
aResult.Assign(temp.get());
return NS_OK;
}

View File

@ -112,10 +112,10 @@ GamepadManager::StopMonitoring()
mChannelChildren.Clear();
mGamepads.Clear();
#if defined(XP_WIN) || defined(XP_MACOSX) || defined(XP_LINUX)
gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
vm->SendControllerListenerRemoved();
#endif
if (gfx::VRManagerChild::IsCreated()) {
gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
vm->SendControllerListenerRemoved();
}
}
void
@ -729,12 +729,14 @@ GamepadManager::VibrateHaptic(uint32_t aControllerIdx, uint32_t aHapticIndex,
}
if (aControllerIdx >= VR_GAMEPAD_IDX_OFFSET) {
uint32_t index = aControllerIdx - VR_GAMEPAD_IDX_OFFSET;
gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
vm->AddPromise(mPromiseID, promise);
vm->SendVibrateHaptic(index, aHapticIndex,
aIntensity, aDuration,
mPromiseID);
if (gfx::VRManagerChild::IsCreated()) {
const uint32_t index = aControllerIdx - VR_GAMEPAD_IDX_OFFSET;
gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
vm->AddPromise(mPromiseID, promise);
vm->SendVibrateHaptic(index, aHapticIndex,
aIntensity, aDuration,
mPromiseID);
}
} else {
for (const auto& channelChild: mChannelChildren) {
channelChild->AddPromise(mPromiseID, promise);
@ -754,9 +756,11 @@ GamepadManager::StopHaptics()
for (auto iter = mGamepads.Iter(); !iter.Done(); iter.Next()) {
const uint32_t gamepadIndex = iter.UserData()->HashKey();
if (gamepadIndex >= VR_GAMEPAD_IDX_OFFSET) {
const uint32_t index = gamepadIndex - VR_GAMEPAD_IDX_OFFSET;
gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
vm->SendStopVibrateHaptic(index);
if (gfx::VRManagerChild::IsCreated()) {
const uint32_t index = gamepadIndex - VR_GAMEPAD_IDX_OFFSET;
gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
vm->SendStopVibrateHaptic(index);
}
} else {
for (auto& channelChild : mChannelChildren) {
channelChild->SendStopVibrateHaptic(gamepadIndex);
@ -781,12 +785,12 @@ GamepadManager::ActorCreated(PBackgroundChild *aActor)
child->SendGamepadListenerAdded();
mChannelChildren.AppendElement(child);
#if defined(XP_WIN) || defined(XP_MACOSX) || defined(XP_LINUX)
// Construct VRManagerChannel and ask adding the connected
// VR controllers to GamepadManager
gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
vm->SendControllerListenerAdded();
#endif
if (gfx::VRManagerChild::IsCreated()) {
// Construct VRManagerChannel and ask adding the connected
// VR controllers to GamepadManager
gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
vm->SendControllerListenerAdded();
}
}
//Override nsIIPCBackgroundChildCreateCallback
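The guarded VibrateHaptic/StopHaptics paths above back the Gamepad Extensions haptics API; a hedged content-script sketch of the path these guards protect (requires the gamepad-extensions pref and supporting hardware):

// Illustrative only: pulse the first available haptic actuator at full intensity for 100 ms.
const pads = Array.from(navigator.getGamepads())
  .filter(p => p && p.hapticActuators && p.hapticActuators.length > 0);
if (pads.length) {
  pads[0].hapticActuators[0].pulse(1.0, 100)
    .then(done => console.log("pulse finished:", done));
}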

View File

@ -1444,23 +1444,6 @@ NS_IMPL_BOOL_ATTR(HTMLMediaElement, Loop, loop)
NS_IMPL_BOOL_ATTR(HTMLMediaElement, DefaultMuted, muted)
NS_IMPL_ENUM_ATTR_DEFAULT_VALUE(HTMLMediaElement, Preload, preload, nullptr)
NS_IMETHODIMP
HTMLMediaElement::GetMozAudioChannelType(nsAString& aValue)
{
nsString defaultValue;
AudioChannelService::GetDefaultAudioChannelString(defaultValue);
NS_ConvertUTF16toUTF8 str(defaultValue);
GetEnumAttr(nsGkAtoms::mozaudiochannel, str.get(), aValue);
return NS_OK;
}
NS_IMETHODIMP
HTMLMediaElement::SetMozAudioChannelType(const nsAString& aValue)
{
return SetAttrHelper(nsGkAtoms::mozaudiochannel, aValue);
}
NS_IMETHODIMP_(bool)
HTMLMediaElement::IsVideo()
{
@ -4142,77 +4125,12 @@ bool HTMLMediaElement::ParseAttribute(int32_t aNamespaceID,
if (aAttribute == nsGkAtoms::preload) {
return aResult.ParseEnumValue(aValue, kPreloadTable, false);
}
// Remove the b2g-specific audio channel setting in bug1299390.
if (aAttribute == nsGkAtoms::mozaudiochannel) {
const nsAttrValue::EnumTable* table =
AudioChannelService::GetAudioChannelTable();
MOZ_ASSERT(table);
bool parsed = aResult.ParseEnumValue(aValue, table, false, &table[0]);
if (!parsed) {
return false;
}
AudioChannel audioChannel = static_cast<AudioChannel>(aResult.GetEnumValue());
if (audioChannel == mAudioChannel ||
!CheckAudioChannelPermissions(aValue)) {
return true;
}
// We cannot change the AudioChannel of a decoder.
if (mDecoder) {
return true;
}
mAudioChannel = audioChannel;
if (mSrcStream) {
RefPtr<MediaStream> stream = GetSrcMediaStream();
if (stream) {
stream->SetAudioChannelType(mAudioChannel);
}
}
return true;
}
}
return nsGenericHTMLElement::ParseAttribute(aNamespaceID, aAttribute, aValue,
aResult);
}
bool HTMLMediaElement::CheckAudioChannelPermissions(const nsAString& aString)
{
// Only normal channel doesn't need permission.
if (aString.EqualsASCII("normal")) {
return true;
}
// Maybe this audio channel is equal to the default value from the pref.
nsString audioChannel;
AudioChannelService::GetDefaultAudioChannelString(audioChannel);
if (audioChannel.Equals(aString)) {
return true;
}
nsCOMPtr<nsIPermissionManager> permissionManager =
services::GetPermissionManager();
if (!permissionManager) {
return false;
}
uint32_t perm = nsIPermissionManager::UNKNOWN_ACTION;
permissionManager->TestExactPermissionFromPrincipal(NodePrincipal(),
nsCString(NS_LITERAL_CSTRING("audio-channel-") + NS_ConvertUTF16toUTF8(aString)).get(), &perm);
if (perm != nsIPermissionManager::ALLOW_ACTION) {
return false;
}
return true;
}
void HTMLMediaElement::DoneCreatingElement()
{
if (HasAttr(kNameSpaceID_None, nsGkAtoms::muted)) {
@ -7063,15 +6981,6 @@ HTMLMediaElement::GetOrCreateTextTrackManager()
return mTextTrackManager;
}
void
HTMLMediaElement::SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv)
{
nsString channel;
channel.AssignASCII(AudioChannelValues::strings[uint32_t(aValue)].value,
AudioChannelValues::strings[uint32_t(aValue)].length);
SetHTMLAttr(nsGkAtoms::mozaudiochannel, channel, aRv);
}
MediaDecoderOwner::NextFrameStatus
HTMLMediaElement::NextFrameStatus()
{

View File

@ -693,13 +693,6 @@ public:
double MozFragmentEnd();
AudioChannel MozAudioChannelType() const
{
return mAudioChannel;
}
void SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv);
AudioTrackList* AudioTracks();
VideoTrackList* VideoTracks();
@ -751,9 +744,6 @@ public:
// that will soon be gone.
bool IsBeingDestroyed();
IMPL_EVENT_HANDLER(mozinterruptbegin)
IMPL_EVENT_HANDLER(mozinterruptend)
// These are used for testing only
float ComputedVolume() const;
bool ComputedMuted() const;
@ -1250,9 +1240,6 @@ protected:
void ReportTelemetry();
// Check the permissions for audiochannel.
bool CheckAudioChannelPermissions(const nsAString& aType);
// Seeks to aTime seconds. aSeekType can be Exact to seek to exactly the
// seek target, or PrevSyncPoint if a quicker but less precise seek is
// desired, and we'll seek to the sync point (keyframe and/or start of the

View File

@ -1,91 +0,0 @@
<!DOCTYPE HTML>
<html>
<body>
<div id="content" style="display: none">
<audio id="audio1" />
<audio id="audio2" mozaudiochannel="foo" />
</div>
<script type="application/javascript">
function is(a, b, msg) {
parent.postMessage({ status: a === b, msg: msg }, '*');
}
function ok(a, msg) {
parent.postMessage({ status: !!a, msg: msg }, '*');
}
function finish() {
parent.postMessage({ finish: true }, '*');
}
function test_basic() {
var audio1 = document.getElementById("audio1");
ok(audio1, "Audio Element exists");
is(audio1.mozAudioChannelType, "normal", "Default audio1 channel == 'normal'");
try {
audio1.mozAudioChannelType = "foo";
} catch(e) {}
is(audio1.mozAudioChannelType, "normal", "Default audio1 channel == 'normal'");
var audio2 = document.getElementById("audio2");
ok(audio2, "Audio Element exists");
is(audio2.mozAudioChannelType, "normal", "Default audio2 channel == 'normal'");
try {
audio2.mozAudioChannelType = "foo";
} catch(e) {}
is(audio2.mozAudioChannelType, "normal", "Default audio2 channel == 'normal'");
runTest();
}
function test_preferences(aChannel) {
SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", aChannel ]]},
function() {
var audio = document.createElement('audio');
ok(audio, "Audio Element created");
is(audio.mozAudioChannelType, aChannel, "Default audio channel == '" + aChannel + "'");
runTest();
}
);
}
function test_wrong_preferences() {
SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", 'foobar' ]]},
function() {
var audio = document.createElement('audio');
ok(audio, "Audio Element created");
is(audio.mozAudioChannelType, 'normal', "Default audio channel == 'normal'");
runTest();
}
);
}
var tests = [
test_basic,
function() { test_preferences("content"); },
function() { test_preferences("notification"); },
function() { test_preferences("alarm"); },
function() { test_preferences("telephony"); },
function() { test_preferences("ringer"); },
function() { test_preferences("publicnotification"); },
test_wrong_preferences,
];
function runTest() {
if (!tests.length) {
finish();
return;
}
var test = tests.shift();
test();
}
runTest();
</script>
</body>
</html>

View File

@ -183,7 +183,6 @@ support-files =
reflect.js
file_ignoreuserfocus.html
simpleFileOpener.js
file_mozaudiochannel.html
file_bug1166138_1x.png
file_bug1166138_2x.png
file_bug1166138_def.png
@ -506,7 +505,6 @@ skip-if = toolkit == 'android' # bug 939642
[test_map_attributes_reflection.html]
[test_meta_attributes_reflection.html]
[test_mod_attributes_reflection.html]
[test_mozaudiochannel.html]
[test_named_options.html]
[test_nested_invalid_fieldsets.html]
[test_object_attributes_reflection.html]

View File

@ -1,31 +0,0 @@
<!DOCTYPE HTML>
<html>
<head>
<title>Test for mozaudiochannel</title>
<script type="application/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<script type="text/javascript" src="/tests/SimpleTest/EventUtils.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
</head>
<body>
<pre id="test">
<script type="application/javascript">
SimpleTest.waitForExplicitFinish();
SpecialPowers.pushPrefEnv({"set": [["media.useAudioChannelAPI", true]]}, function() {
var ifr = document.createElement('iframe');
ifr.src = 'file_mozaudiochannel.html';
onmessage = function(e) {
if ("finish" in e.data) {
SimpleTest.finish();
} else {
ok(e.data.status, e.data.msg);
}
}
document.body.appendChild(ifr);
});
</script>
</pre>
</body>
</html>

Просмотреть файл

@ -87,40 +87,6 @@ interface nsIDOMHTMLMediaElement : nsISupports
// it is equal to the media duration.
readonly attribute double mozFragmentEnd;
// Mozilla extension: an audio channel type for media elements.
// An exception is thrown if the app tries to change the audio channel type
// without the permission (manifest file for B2G apps).
// The supported values are:
// * normal (default value)
// Automatically paused if "notification" or higher priority channel
// is played
// Use case: normal applications
// * content
// Automatically paused if "notification" or higher priority channel
// is played. Also paused if another app starts using "content"
// channel. Using this channel never affects applications using
// the "normal" channel.
// Use case: video/audio players
// * notification
// Automatically paused if "alarm" or higher priority channel is played.
// Use case: New email, incoming SMS
// * alarm
// Automatically paused if "telephony" or higher priority channel is
// played.
// Use case: Alarm clock, calendar alarms
// * telephony
// Automatically paused if "ringer" or higher priority
// channel is played.
// Use case: dialer, voip
// * ringer
// Automatically paused if "publicnotification" or higher priority
// channel is played.
// Use case: dialer, voip
// * publicnotification
// Always plays in speaker, even when headphones are plugged in.
// Use case: Camera shutter sound.
attribute DOMString mozAudioChannelType;
// In addition the media element has these new events:
// * onmozinterruptbegin - called when the media element is interrupted
// because of the audiochannel manager.

Просмотреть файл

@ -775,11 +775,7 @@ ContentChild::ProvideWindowCommon(TabChild* aTabOpener,
}
MOZ_ASSERT(ipcContext);
TabId tabId;
SendAllocateTabId(openerTabId,
*ipcContext,
GetID(),
&tabId);
TabId tabId(nsContentUtils::GenerateTabId());
// We need to assign a TabGroup to the PBrowser actor before we send it to the
// parent. Otherwise, the parent could send messages to us before we have a

Просмотреть файл

@ -910,9 +910,9 @@ mozilla::ipc::IPCResult
ContentParent::RecvCreateChildProcess(const IPCTabContext& aContext,
const hal::ProcessPriority& aPriority,
const TabId& aOpenerTabId,
const TabId& aTabId,
ContentParentId* aCpId,
bool* aIsForBrowser,
TabId* aTabId)
bool* aIsForBrowser)
{
#if 0
if (!CanOpenBrowser(aContext)) {
@ -943,12 +943,8 @@ ContentParent::RecvCreateChildProcess(const IPCTabContext& aContext,
ContentProcessManager *cpm = ContentProcessManager::GetSingleton();
cpm->AddContentProcess(cp, this->ChildID());
if (cpm->AddGrandchildProcess(this->ChildID(), cp->ChildID())) {
// Pre-allocate a TabId here to save one time IPC call at app startup.
*aTabId = AllocateTabId(aOpenerTabId, aContext, cp->ChildID());
if (*aTabId == 0) {
return IPC_FAIL_NO_REASON(this);
}
if (cpm->AddGrandchildProcess(this->ChildID(), cp->ChildID()) &&
cpm->RegisterRemoteFrame(aTabId, aOpenerTabId, aContext, cp->ChildID())) {
return IPC_OK();
}
@ -1180,7 +1176,7 @@ ContentParent::CreateBrowser(const TabContext& aContext,
ProcessPriority initialPriority = GetInitialProcessPriority(aFrameElement);
bool isInContentProcess = !XRE_IsParentProcess();
TabId tabId;
TabId tabId(nsContentUtils::GenerateTabId());
nsIDocShell* docShell = GetOpenerDocShellHelper(aFrameElement);
TabId openerTabId;
@ -1198,7 +1194,7 @@ ContentParent::CreateBrowser(const TabContext& aContext,
if (isInContentProcess) {
MOZ_ASSERT(aContext.IsMozBrowserElement());
constructorSender = CreateContentBridgeParent(aContext, initialPriority,
openerTabId, &tabId);
openerTabId, tabId);
} else {
if (aOpenerContentParent) {
constructorSender = aOpenerContentParent;
@ -1209,9 +1205,11 @@ ContentParent::CreateBrowser(const TabContext& aContext,
return nullptr;
}
}
tabId = AllocateTabId(openerTabId,
aContext.AsIPCTabContext(),
constructorSender->ChildID());
ContentProcessManager* cpm = ContentProcessManager::GetSingleton();
cpm->RegisterRemoteFrame(tabId,
openerTabId,
aContext.AsIPCTabContext(),
constructorSender->ChildID());
}
if (constructorSender) {
nsCOMPtr<nsIDocShellTreeOwner> treeOwner;
@ -1271,7 +1269,7 @@ ContentParent::CreateBrowser(const TabContext& aContext,
ContentParent::CreateContentBridgeParent(const TabContext& aContext,
const hal::ProcessPriority& aPriority,
const TabId& aOpenerTabId,
/*out*/ TabId* aTabId)
const TabId& aTabId)
{
MOZ_ASSERT(aTabId);
@ -1281,9 +1279,9 @@ ContentParent::CreateContentBridgeParent(const TabContext& aContext,
if (!child->SendCreateChildProcess(aContext.AsIPCTabContext(),
aPriority,
aOpenerTabId,
aTabId,
&cpId,
&isForBrowser,
aTabId)) {
&isForBrowser)) {
return nullptr;
}
if (cpId == 0) {
@ -4217,27 +4215,8 @@ ContentParent::NotifyUpdatedDictionaries()
}
}
/*static*/ TabId
ContentParent::AllocateTabId(const TabId& aOpenerTabId,
const IPCTabContext& aContext,
const ContentParentId& aCpId)
{
TabId tabId;
if (XRE_IsParentProcess()) {
ContentProcessManager* cpm = ContentProcessManager::GetSingleton();
tabId = cpm->AllocateTabId(aOpenerTabId, aContext, aCpId);
}
else {
ContentChild::GetSingleton()->SendAllocateTabId(aOpenerTabId,
aContext,
aCpId,
&tabId);
}
return tabId;
}
/*static*/ void
ContentParent::DeallocateTabId(const TabId& aTabId,
ContentParent::UnregisterRemoteFrame(const TabId& aTabId,
const ContentParentId& aCpId,
bool aMarkedDestroying)
{
@ -4247,32 +4226,19 @@ ContentParent::DeallocateTabId(const TabId& aTabId,
cp->NotifyTabDestroyed(aTabId, aMarkedDestroying);
ContentProcessManager::GetSingleton()->DeallocateTabId(aCpId, aTabId);
ContentProcessManager::GetSingleton()->UnregisterRemoteFrame(aCpId, aTabId);
} else {
ContentChild::GetSingleton()->SendDeallocateTabId(aTabId, aCpId,
ContentChild::GetSingleton()->SendUnregisterRemoteFrame(aTabId, aCpId,
aMarkedDestroying);
}
}
mozilla::ipc::IPCResult
ContentParent::RecvAllocateTabId(const TabId& aOpenerTabId,
const IPCTabContext& aContext,
const ContentParentId& aCpId,
TabId* aTabId)
{
*aTabId = AllocateTabId(aOpenerTabId, aContext, aCpId);
if (!(*aTabId)) {
return IPC_FAIL_NO_REASON(this);
}
return IPC_OK();
}
mozilla::ipc::IPCResult
ContentParent::RecvDeallocateTabId(const TabId& aTabId,
ContentParent::RecvUnregisterRemoteFrame(const TabId& aTabId,
const ContentParentId& aCpId,
const bool& aMarkedDestroying)
{
DeallocateTabId(aTabId, aCpId, aMarkedDestroying);
UnregisterRemoteFrame(aTabId, aCpId, aMarkedDestroying);
return IPC_OK();
}
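Taken together, the ContentParent changes above replace the old synchronous AllocateTabId/DeallocateTabId round trip with a tab id that is minted locally and then registered. A minimal sketch of the new flow, pieced together from the calls shown in this commit (nsContentUtils::GenerateTabId, ContentProcessManager::RegisterRemoteFrame); the wrapper function and its name are illustrative only, not part of the patch:

static TabId
MintAndRegisterTabId(const TabId& aOpenerTabId,
                     const IPCTabContext& aContext,
                     const ContentParentId& aChildCpId)
{
  // The id is generated locally instead of being returned by a sync IPC call.
  TabId tabId(nsContentUtils::GenerateTabId());

  // Record the frame with the ContentProcessManager; the failure path mirrors
  // the IPC_FAIL handling in RecvCreateChildProcess above.
  ContentProcessManager* cpm = ContentProcessManager::GetSingleton();
  if (!cpm->RegisterRemoteFrame(tabId, aOpenerTabId, aContext, aChildCpId)) {
    return TabId(0);
  }
  return tabId;
}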

Просмотреть файл

@ -279,9 +279,9 @@ public:
virtual mozilla::ipc::IPCResult RecvCreateChildProcess(const IPCTabContext& aContext,
const hal::ProcessPriority& aPriority,
const TabId& aOpenerTabId,
const TabId& aTabId,
ContentParentId* aCpId,
bool* aIsForBrowser,
TabId* aTabId) override;
bool* aIsForBrowser) override;
virtual mozilla::ipc::IPCResult RecvBridgeToChildProcess(const ContentParentId& aCpId,
Endpoint<PContentBridgeParent>* aEndpoint) override;
@ -350,15 +350,10 @@ public:
jsipc::CPOWManager* GetCPOWManager() override;
static TabId
AllocateTabId(const TabId& aOpenerTabId,
const IPCTabContext& aContext,
const ContentParentId& aCpId);
static void
DeallocateTabId(const TabId& aTabId,
const ContentParentId& aCpId,
bool aMarkedDestroying);
UnregisterRemoteFrame(const TabId& aTabId,
const ContentParentId& aCpId,
bool aMarkedDestroying);
void ReportChildAlreadyBlocked();
@ -489,14 +484,9 @@ public:
SendPBlobConstructor(PBlobParent* aActor,
const BlobConstructorParams& aParams) override;
virtual mozilla::ipc::IPCResult RecvAllocateTabId(const TabId& aOpenerTabId,
const IPCTabContext& aContext,
const ContentParentId& aCpId,
TabId* aTabId) override;
virtual mozilla::ipc::IPCResult RecvDeallocateTabId(const TabId& aTabId,
const ContentParentId& aCpId,
const bool& aMarkedDestroying) override;
virtual mozilla::ipc::IPCResult RecvUnregisterRemoteFrame(const TabId& aTabId,
const ContentParentId& aCpId,
const bool& aMarkedDestroying) override;
virtual mozilla::ipc::IPCResult RecvNotifyTabDestroying(const TabId& aTabId,
const ContentParentId& aCpId) override;
@ -694,7 +684,7 @@ private:
static ContentBridgeParent* CreateContentBridgeParent(const TabContext& aContext,
const hal::ProcessPriority& aPriority,
const TabId& aOpenerTabId,
/*out*/ TabId* aTabId);
const TabId& aTabId);
// Hide the raw constructor methods since we don't want client code
// using them.

Просмотреть файл

@ -24,8 +24,6 @@
namespace mozilla {
namespace dom {
static uint64_t gTabId = 0;
/* static */
StaticAutoPtr<ContentProcessManager>
ContentProcessManager::sSingleton;
@ -135,17 +133,18 @@ ContentProcessManager::GetAllChildProcessById(const ContentParentId& aParentCpId
return Move(cpIdArray);
}
TabId
ContentProcessManager::AllocateTabId(const TabId& aOpenerTabId,
const IPCTabContext& aContext,
const ContentParentId& aChildCpId)
bool
ContentProcessManager::RegisterRemoteFrame(const TabId& aTabId,
const TabId& aOpenerTabId,
const IPCTabContext& aContext,
const ContentParentId& aChildCpId)
{
MOZ_ASSERT(NS_IsMainThread());
auto iter = mContentParentMap.find(aChildCpId);
if (NS_WARN_IF(iter == mContentParentMap.end())) {
ASSERT_UNLESS_FUZZING();
return TabId(0);
return false;
}
struct RemoteFrameInfo info;
@ -156,20 +155,10 @@ ContentProcessManager::AllocateTabId(const TabId& aOpenerTabId,
auto remoteFrameIter = iter->second.mRemoteFrames.find(aOpenerTabId);
if (remoteFrameIter == iter->second.mRemoteFrames.end()) {
ASSERT_UNLESS_FUZZING("Failed to find parent frame's opener id.");
return TabId(0);
return false;
}
info.mOpenerTabId = remoteFrameIter->second.mOpenerTabId;
const PopupIPCTabContext &ipcContext = aContext.get_PopupIPCTabContext();
MOZ_ASSERT(ipcContext.opener().type() == PBrowserOrId::TTabId);
remoteFrameIter = iter->second.mRemoteFrames.find(ipcContext.opener().get_TabId());
if (remoteFrameIter == iter->second.mRemoteFrames.end()) {
ASSERT_UNLESS_FUZZING("Failed to find tab id.");
return TabId(0);
}
info.mContext = remoteFrameIter->second.mContext;
}
else {
@ -178,21 +167,19 @@ ContentProcessManager::AllocateTabId(const TabId& aOpenerTabId,
NS_ERROR(nsPrintfCString("Received an invalid TabContext from "
"the child process. (%s)",
tc.GetInvalidReason()).get());
return TabId(0);
return false;
}
info.mOpenerTabId = aOpenerTabId;
info.mContext = tc.GetTabContext();
}
mUniqueId = ++gTabId;
iter->second.mRemoteFrames[mUniqueId] = info;
return mUniqueId;
iter->second.mRemoteFrames[aTabId] = info;
return true;
}
void
ContentProcessManager::DeallocateTabId(const ContentParentId& aChildCpId,
const TabId& aChildTabId)
ContentProcessManager::UnregisterRemoteFrame(const ContentParentId& aChildCpId,
const TabId& aChildTabId)
{
MOZ_ASSERT(NS_IsMainThread());

Просмотреть файл

@ -72,21 +72,23 @@ public:
GetAllChildProcessById(const ContentParentId& aParentCpId);
/**
* Allocate a tab id for the given content process's id.
* Register RemoteFrameInfo with the given tab id.
* Used when a content process wants to create a new tab. aOpenerTabId and
* aContext are saved in RemoteFrameInfo, which is a part of
* ContentProcessInfo. We can use the tab id and process id to locate the
* TabContext for future use.
*/
TabId AllocateTabId(const TabId& aOpenerTabId,
const IPCTabContext& aContext,
const ContentParentId& aChildCpId);
bool RegisterRemoteFrame(const TabId& aTabId,
const TabId& aOpenerTabId,
const IPCTabContext& aContext,
const ContentParentId& aChildCpId);
/**
* Remove the RemoteFrameInfo by the given process and tab id.
*/
void DeallocateTabId(const ContentParentId& aChildCpId,
const TabId& aChildTabId);
void UnregisterRemoteFrame(const ContentParentId& aChildCpId,
const TabId& aChildTabId);
/**
* Get the TabContext by the given content process and tab id.
@ -151,7 +153,6 @@ public:
private:
static StaticAutoPtr<ContentProcessManager> sSingleton;
TabId mUniqueId;
std::map<ContentParentId, ContentProcessInfo> mContentParentMap;
ContentProcessManager() {MOZ_COUNT_CTOR(ContentProcessManager);};
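In short, the manager no longer mints ids itself (gTabId and mUniqueId are removed); the caller supplies a TabId and RegisterRemoteFrame merely records it, reporting success as a bool. A rough usage sketch, assuming the declarations above and an id produced by nsContentUtils::GenerateTabId; openerTabId, ipcContext and childCpId stand in for caller-provided values:

ContentProcessManager* cpm = ContentProcessManager::GetSingleton();
TabId tabId(nsContentUtils::GenerateTabId());   // the caller now mints the id
if (cpm->RegisterRemoteFrame(tabId, openerTabId, ipcContext, childCpId)) {
  // ... the frame is in use; on teardown the entry is dropped again.
  // Note the (process id, tab id) argument order on the unregister side.
  cpm->UnregisterRemoteFrame(childCpId, tabId);
}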

Просмотреть файл

@ -624,8 +624,9 @@ parent:
sync CreateChildProcess(IPCTabContext context,
ProcessPriority priority,
TabId openerTabId)
returns (ContentParentId cpId, bool isForBrowser, TabId tabId);
TabId openerTabId,
TabId tabId)
returns (ContentParentId cpId, bool isForBrowser);
sync BridgeToChildProcess(ContentParentId cpId)
returns (Endpoint<PContentBridgeParent> endpoint);
@ -915,15 +916,10 @@ parent:
sync KeygenProvideContent()
returns (nsString aAttribute, nsString[] aContent);
/**
* Tell the chrome process there is a creation of PBrowser.
* Returns a system-wide unique Id.
*/
sync AllocateTabId(TabId openerTabId, IPCTabContext context, ContentParentId cpId)
returns (TabId tabId);
async DeallocateTabId(TabId tabId,
ContentParentId cpId,
bool aMarkedDestroying);
/** Clear RemoteFrameInfo of the given tab id. */
async UnregisterRemoteFrame(TabId tabId,
ContentParentId cpId,
bool aMarkedDestroying);
/**
* Tell the chrome process there is a destruction of PBrowser(Tab)
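On the wire this means the tab id now travels parent-ward as an argument instead of coming back as a return value. A sketch of the child-side call under the new signature, following the CreateContentBridgeParent hunk earlier in this commit (the surrounding variables are assumed context, not part of the patch):

ContentParentId cpId;
bool isForBrowser = false;
// aTabId is generated by the caller (nsContentUtils::GenerateTabId) and passed
// in; the old sync AllocateTabId message no longer exists.
if (!child->SendCreateChildProcess(aContext.AsIPCTabContext(),
                                   aPriority,
                                   aOpenerTabId,
                                   aTabId,
                                   &cpId,
                                   &isForBrowser)) {
  return nullptr;
}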

Просмотреть файл

@ -413,15 +413,15 @@ mozilla::ipc::IPCResult
TabParent::Recv__delete__()
{
if (XRE_IsParentProcess()) {
ContentParent::DeallocateTabId(mTabId,
Manager()->AsContentParent()->ChildID(),
mMarkedDestroying);
ContentParent::UnregisterRemoteFrame(mTabId,
Manager()->AsContentParent()->ChildID(),
mMarkedDestroying);
}
else {
Manager()->AsContentBridgeParent()->NotifyTabDestroyed();
ContentParent::DeallocateTabId(mTabId,
Manager()->ChildID(),
mMarkedDestroying);
ContentParent::UnregisterRemoteFrame(mTabId,
Manager()->ChildID(),
mMarkedDestroying);
}
return IPC_OK();

Просмотреть файл

@ -10,6 +10,7 @@
#include "mozilla/dom/File.h"
#include "mozilla/dom/ContentParent.h"
#include "mozilla/dom/ContentBridgeParent.h"
#include "mozilla/dom/ContentProcessManager.h"
#include "mozilla/dom/PTabContext.h"
#include "mozilla/dom/PermissionMessageUtils.h"
#include "mozilla/dom/TabParent.h"
@ -138,12 +139,15 @@ nsIContentParent::AllocPBrowserParent(const TabId& aTabId,
}
uint32_t chromeFlags = aChromeFlags;
TabId openerTabId(0);
if (aContext.type() == IPCTabContext::TPopupIPCTabContext) {
// CanOpenBrowser has ensured that the IPCTabContext is of
// type PopupIPCTabContext, and that the opener TabParent is
// reachable.
const PopupIPCTabContext& popupContext = aContext.get_PopupIPCTabContext();
auto opener = TabParent::GetFrom(popupContext.opener().get_PBrowserParent());
openerTabId = opener->GetTabId();
// We must ensure that the private browsing and remoteness flags
// match those of the opener.
nsCOMPtr<nsILoadContext> loadContext = opener->GetLoadContext();
@ -158,6 +162,29 @@ nsIContentParent::AllocPBrowserParent(const TabId& aTabId,
}
}
if (openerTabId > 0 ||
aContext.type() == IPCTabContext::TUnsafeIPCTabContext) {
// Creation of PBrowser triggered from a grandchild process is currently
// broken and not supported (i.e. this code path doesn't work in
// ContentBridgeParent).
//
// If you're working on fixing the code path for ContentBridgeParent,
// remember to handle the remote frame registration below carefully, as it
// has to happen in the parent process.
MOZ_ASSERT(XRE_IsParentProcess());
if (!XRE_IsParentProcess()) {
return nullptr;
}
// The creation of PBrowser was triggered from a content process through
// either window.open() or a service worker's openWindow().
// We need to register the remote frame with the child-generated tab id.
ContentProcessManager* cpm = ContentProcessManager::GetSingleton();
if (!cpm->RegisterRemoteFrame(aTabId, openerTabId, aContext, aCpId)) {
return nullptr;
}
}
// And because we're allocating a remote browser, of course the
// window is remote.
chromeFlags |= nsIWebBrowserChrome::CHROME_REMOTE_WINDOW;

Просмотреть файл

@ -43,6 +43,8 @@ public:
bool Push(int64_t aOffset, int64_t aTime, int32_t aSampleRate,
uint32_t aFrames, uint32_t aChannels, CopyFunc aCopyFunc)
{
auto time = media::TimeUnit::FromMicroseconds(aTime);
// If we are losing more than a reasonable amount to padding, try to chunk
// the data.
size_t maxSlop = AudioDataSize(aFrames, aChannels) / MAX_SLOP_DIVISOR;
@ -63,14 +65,14 @@ public:
NS_ASSERTION(framesCopied <= aFrames, "functor copied too many frames");
buffer.SetLength(size_t(framesCopied) * aChannels);
CheckedInt64 duration = FramesToUsecs(framesCopied, aSampleRate);
if (!duration.isValid()) {
auto duration = FramesToTimeUnit(framesCopied, aSampleRate);
if (!duration.IsValid()) {
return false;
}
mQueue.Push(new AudioData(aOffset,
aTime,
duration.value(),
time,
duration,
framesCopied,
Move(buffer),
aChannels,
@ -78,7 +80,7 @@ public:
// Remove the frames we just pushed into the queue and loop if there is
// more to be done.
aTime += duration.value();
time += duration;
aFrames -= framesCopied;
// NOTE: No need to update aOffset as it's only an approximation anyway.

Просмотреть файл

@ -73,8 +73,8 @@ AudioData::IsAudible() const
/* static */
already_AddRefed<AudioData>
AudioData::TransferAndUpdateTimestampAndDuration(AudioData* aOther,
int64_t aTimestamp,
int64_t aDuration)
const TimeUnit& aTimestamp,
const TimeUnit& aDuration)
{
NS_ENSURE_TRUE(aOther, nullptr);
RefPtr<AudioData> v = new AudioData(aOther->mOffset,
@ -163,10 +163,10 @@ IsInEmulator()
#endif
VideoData::VideoData(int64_t aOffset,
int64_t aTime,
int64_t aDuration,
const TimeUnit& aTime,
const TimeUnit& aDuration,
bool aKeyframe,
int64_t aTimecode,
const TimeUnit& aTimecode,
IntSize aDisplay,
layers::ImageContainer::FrameID aFrameID)
: MediaData(VIDEO_DATA, aOffset, aTime, aDuration, 1)
@ -176,7 +176,7 @@ VideoData::VideoData(int64_t aOffset,
{
MOZ_ASSERT(!mDuration.IsNegative(), "Frame must have non-negative duration.");
mKeyframe = aKeyframe;
mTimecode = TimeUnit::FromMicroseconds(aTimecode);
mTimecode = aTimecode;
}
VideoData::~VideoData()
@ -285,11 +285,11 @@ already_AddRefed<VideoData>
VideoData::CreateAndCopyData(const VideoInfo& aInfo,
ImageContainer* aContainer,
int64_t aOffset,
int64_t aTime,
const TimeUnit& aTime,
const TimeUnit& aDuration,
const YCbCrBuffer& aBuffer,
bool aKeyframe,
int64_t aTimecode,
const TimeUnit& aTimecode,
const IntRect& aPicture)
{
if (!aContainer) {
@ -297,7 +297,7 @@ VideoData::CreateAndCopyData(const VideoInfo& aInfo,
// send to media streams if necessary.
RefPtr<VideoData> v(new VideoData(aOffset,
aTime,
aDuration.ToMicroseconds(),
aDuration,
aKeyframe,
aTimecode,
aInfo.mDisplay,
@ -311,7 +311,7 @@ VideoData::CreateAndCopyData(const VideoInfo& aInfo,
RefPtr<VideoData> v(new VideoData(aOffset,
aTime,
aDuration.ToMicroseconds(),
aDuration,
aKeyframe,
aTimecode,
aInfo.mDisplay,
@ -369,12 +369,12 @@ already_AddRefed<VideoData>
VideoData::CreateAndCopyData(const VideoInfo& aInfo,
ImageContainer* aContainer,
int64_t aOffset,
int64_t aTime,
const TimeUnit& aTime,
const TimeUnit& aDuration,
const YCbCrBuffer& aBuffer,
const YCbCrBuffer::Plane &aAlphaPlane,
bool aKeyframe,
int64_t aTimecode,
const TimeUnit& aTimecode,
const IntRect& aPicture)
{
if (!aContainer) {
@ -382,7 +382,7 @@ VideoData::CreateAndCopyData(const VideoInfo& aInfo,
// send to media streams if necessary.
RefPtr<VideoData> v(new VideoData(aOffset,
aTime,
aDuration.ToMicroseconds(),
aDuration,
aKeyframe,
aTimecode,
aInfo.mDisplay,
@ -396,7 +396,7 @@ VideoData::CreateAndCopyData(const VideoInfo& aInfo,
RefPtr<VideoData> v(new VideoData(aOffset,
aTime,
aDuration.ToMicroseconds(),
aDuration,
aKeyframe,
aTimecode,
aInfo.mDisplay,
@ -435,15 +435,15 @@ VideoData::CreateAndCopyData(const VideoInfo& aInfo,
already_AddRefed<VideoData>
VideoData::CreateFromImage(const IntSize& aDisplay,
int64_t aOffset,
int64_t aTime,
const TimeUnit& aTime,
const TimeUnit& aDuration,
const RefPtr<Image>& aImage,
bool aKeyframe,
int64_t aTimecode)
const TimeUnit& aTimecode)
{
RefPtr<VideoData> v(new VideoData(aOffset,
aTime,
aDuration.ToMicroseconds(),
aDuration,
aKeyframe,
aTimecode,
aDisplay,

Просмотреть файл

@ -289,14 +289,14 @@ public:
MediaData(Type aType,
int64_t aOffset,
int64_t aTimestamp,
int64_t aDuration,
const media::TimeUnit& aTimestamp,
const media::TimeUnit& aDuration,
uint32_t aFrames)
: mType(aType)
, mOffset(aOffset)
, mTime(media::TimeUnit::FromMicroseconds(aTimestamp))
, mTimecode(media::TimeUnit::FromMicroseconds(aTimestamp))
, mDuration(media::TimeUnit::FromMicroseconds(aDuration))
, mTime(aTimestamp)
, mTimecode(aTimestamp)
, mDuration(aDuration)
, mFrames(aFrames)
, mKeyframe(false)
{
@ -366,7 +366,9 @@ protected:
class NullData : public MediaData
{
public:
NullData(int64_t aOffset, int64_t aTime, int64_t aDuration)
NullData(int64_t aOffset,
const media::TimeUnit& aTime,
const media::TimeUnit& aDuration)
: MediaData(NULL_DATA, aOffset, aTime, aDuration, 0)
{
}
@ -380,8 +382,8 @@ class AudioData : public MediaData
public:
AudioData(int64_t aOffset,
int64_t aTime,
int64_t aDuration,
const media::TimeUnit& aTime,
const media::TimeUnit& aDuration,
uint32_t aFrames,
AlignedAudioBuffer&& aData,
uint32_t aChannels,
@ -402,8 +404,8 @@ public:
// After such a call, the original aOther is unusable.
static already_AddRefed<AudioData>
TransferAndUpdateTimestampAndDuration(AudioData* aOther,
int64_t aTimestamp,
int64_t aDuration);
const media::TimeUnit& aTimestamp,
const media::TimeUnit& aDuration);
size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const;
@ -489,43 +491,43 @@ public:
const VideoInfo& aInfo,
ImageContainer* aContainer,
int64_t aOffset,
int64_t aTime,
const media::TimeUnit& aTime,
const media::TimeUnit& aDuration,
const YCbCrBuffer& aBuffer,
bool aKeyframe,
int64_t aTimecode,
const media::TimeUnit& aTimecode,
const IntRect& aPicture);
static already_AddRefed<VideoData> CreateAndCopyData(
const VideoInfo& aInfo,
ImageContainer* aContainer,
int64_t aOffset,
int64_t aTime,
const media::TimeUnit& aTime,
const media::TimeUnit& aDuration,
const YCbCrBuffer& aBuffer,
const YCbCrBuffer::Plane& aAlphaPlane,
bool aKeyframe,
int64_t aTimecode,
const media::TimeUnit& aTimecode,
const IntRect& aPicture);
static already_AddRefed<VideoData> CreateAndCopyIntoTextureClient(
const VideoInfo& aInfo,
int64_t aOffset,
int64_t aTime,
const media::TimeUnit& aTime,
const media::TimeUnit& aDuration,
layers::TextureClient* aBuffer,
bool aKeyframe,
int64_t aTimecode,
const media::TimeUnit& aTimecode,
const IntRect& aPicture);
static already_AddRefed<VideoData> CreateFromImage(
const IntSize& aDisplay,
int64_t aOffset,
int64_t aTime,
const media::TimeUnit& aTime,
const media::TimeUnit& aDuration,
const RefPtr<Image>& aImage,
bool aKeyframe,
int64_t aTimecode);
const media::TimeUnit& aTimecode);
// Initialize PlanarYCbCrImage. Only when aCopyData is true,
// video data is copied to PlanarYCbCrImage.
@ -548,10 +550,10 @@ public:
int32_t mFrameID;
VideoData(int64_t aOffset,
int64_t aTime,
int64_t aDuration,
const media::TimeUnit& aTime,
const media::TimeUnit& aDuration,
bool aKeyframe,
int64_t aTimecode,
const media::TimeUnit& aTimecode,
IntSize aDisplay,
uint32_t aFrameID);

Просмотреть файл

@ -1390,8 +1390,8 @@ private:
return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
}
RefPtr<AudioData> data(new AudioData(
aAudio->mOffset, mSeekJob.mTarget->GetTime().ToMicroseconds(),
duration.ToMicroseconds(), frames, Move(audioData), channels,
aAudio->mOffset, mSeekJob.mTarget->GetTime(),
duration, frames, Move(audioData), channels,
aAudio->mRate));
MOZ_ASSERT(AudioQueue().GetSize() == 0,
"Should be the 1st sample after seeking");

Diff not shown because of its large size. Load diff

Просмотреть файл

@ -55,12 +55,10 @@ namespace ipc {
class PrincipalInfo;
}
class MediaManager;
class GetUserMediaCallbackMediaStreamListener;
class GetUserMediaTask;
extern LogModule* GetMediaManagerLog();
#define MM_LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
class GetUserMediaWindowListener;
class MediaManager;
class SourceListener;
class MediaDevice : public nsIMediaDevice
{
@ -136,34 +134,29 @@ class GetUserMediaNotificationEvent: public Runnable
enum GetUserMediaStatus {
STARTING,
STOPPING,
STOPPED_TRACK,
};
GetUserMediaNotificationEvent(GetUserMediaCallbackMediaStreamListener* aListener,
GetUserMediaStatus aStatus,
bool aIsAudio, bool aIsVideo, uint64_t aWindowID);
GetUserMediaNotificationEvent(GetUserMediaStatus aStatus,
uint64_t aWindowID);
GetUserMediaNotificationEvent(GetUserMediaStatus aStatus,
already_AddRefed<DOMMediaStream> aStream,
OnTracksAvailableCallback* aOnTracksAvailableCallback,
bool aIsAudio, bool aIsVideo, uint64_t aWindowID,
already_AddRefed<media::Refcountable<UniquePtr<OnTracksAvailableCallback>>> aOnTracksAvailableCallback,
uint64_t aWindowID,
already_AddRefed<nsIDOMGetUserMediaErrorCallback> aError);
virtual ~GetUserMediaNotificationEvent();
NS_IMETHOD Run() override;
protected:
RefPtr<GetUserMediaCallbackMediaStreamListener> mListener; // threadsafe
RefPtr<GetUserMediaWindowListener> mListener; // threadsafe
RefPtr<DOMMediaStream> mStream;
nsAutoPtr<OnTracksAvailableCallback> mOnTracksAvailableCallback;
RefPtr<media::Refcountable<UniquePtr<OnTracksAvailableCallback>>> mOnTracksAvailableCallback;
GetUserMediaStatus mStatus;
bool mIsAudio;
bool mIsVideo;
uint64_t mWindowID;
RefPtr<nsIDOMGetUserMediaErrorCallback> mOnFailure;
};
typedef enum {
MEDIA_START,
MEDIA_STOP,
MEDIA_STOP_TRACK,
MEDIA_DIRECT_LISTENERS,
@ -172,30 +165,30 @@ typedef enum {
class ReleaseMediaOperationResource : public Runnable
{
public:
ReleaseMediaOperationResource(already_AddRefed<DOMMediaStream> aStream,
OnTracksAvailableCallback* aOnTracksAvailableCallback):
ReleaseMediaOperationResource(
already_AddRefed<DOMMediaStream> aStream,
already_AddRefed<media::Refcountable<UniquePtr<OnTracksAvailableCallback>>> aOnTracksAvailableCallback):
mStream(aStream),
mOnTracksAvailableCallback(aOnTracksAvailableCallback) {}
NS_IMETHOD Run() override {return NS_OK;}
private:
RefPtr<DOMMediaStream> mStream;
nsAutoPtr<OnTracksAvailableCallback> mOnTracksAvailableCallback;
RefPtr<media::Refcountable<UniquePtr<OnTracksAvailableCallback>>> mOnTracksAvailableCallback;
};
typedef nsTArray<RefPtr<GetUserMediaCallbackMediaStreamListener> > StreamListeners;
typedef nsClassHashtable<nsUint64HashKey, StreamListeners> WindowTable;
typedef nsRefPtrHashtable<nsUint64HashKey, GetUserMediaWindowListener> WindowTable;
// we could add MediaManager if needed
typedef void (*WindowListenerCallback)(MediaManager *aThis,
uint64_t aWindowID,
StreamListeners *aListeners,
GetUserMediaWindowListener *aListener,
void *aData);
class MediaManager final : public nsIMediaManagerService,
public nsIObserver
,public DeviceChangeCallback
{
friend GetUserMediaCallbackMediaStreamListener;
friend SourceListener;
public:
static already_AddRefed<MediaManager> GetInstance();
@ -216,9 +209,7 @@ public:
}
static nsresult NotifyRecordingStatusChange(nsPIDOMWindowInner* aWindow,
const nsString& aMsg,
const bool& aIsAudio,
const bool& aIsVideo);
const nsString& aMsg);
NS_DECL_THREADSAFE_ISUPPORTS
NS_DECL_NSIOBSERVER
@ -226,17 +217,23 @@ public:
media::Parent<media::NonE10s>* GetNonE10sParent();
MediaEngine* GetBackend(uint64_t aWindowId = 0);
StreamListeners *GetWindowListeners(uint64_t aWindowId) {
WindowTable *GetActiveWindows() {
MOZ_ASSERT(NS_IsMainThread());
return mActiveWindows.Get(aWindowId);
return &mActiveWindows;
}
GetUserMediaWindowListener *GetWindowListener(uint64_t aWindowId) {
MOZ_ASSERT(NS_IsMainThread());
return mActiveWindows.GetWeak(aWindowId);
}
void AddWindowID(uint64_t aWindowId, GetUserMediaWindowListener *aListener);
void RemoveWindowID(uint64_t aWindowId);
bool IsWindowStillActive(uint64_t aWindowId) {
return !!GetWindowListeners(aWindowId);
return !!GetWindowListener(aWindowId);
}
// Note: also calls aListener->Remove(), even if inactive
void RemoveFromWindowList(uint64_t aWindowID,
GetUserMediaCallbackMediaStreamListener *aListener);
GetUserMediaWindowListener *aListener);
nsresult GetUserMedia(
nsPIDOMWindowInner* aWindow,
@ -293,12 +290,6 @@ private:
bool aIsChrome,
RefPtr<media::Refcountable<UniquePtr<SourceSet>>>& aSources);
StreamListeners* AddWindowID(uint64_t aWindowId);
WindowTable *GetActiveWindows() {
MOZ_ASSERT(NS_IsMainThread());
return &mActiveWindows;
}
void GetPref(nsIPrefBranch *aBranch, const char *aPref,
const char *aData, int32_t *aVal);
void GetPrefBool(nsIPrefBranch *aBranch, const char *aPref,

Просмотреть файл

@ -176,11 +176,11 @@ bool AndroidMediaReader::DecodeVideoFrame(bool& aKeyframeSkip,
if (currentImage) {
v = VideoData::CreateFromImage(mInfo.mVideo.mDisplay,
pos,
frame.mTimeUs,
TimeUnit::FromMicroseconds(frame.mTimeUs),
TimeUnit::FromMicroseconds(1), // We don't know the duration yet.
currentImage,
frame.mKeyFrame,
-1);
TimeUnit::FromMicroseconds(-1));
} else {
// Assume YUV
VideoData::YCbCrBuffer b;
@ -221,11 +221,11 @@ bool AndroidMediaReader::DecodeVideoFrame(bool& aKeyframeSkip,
v = VideoData::CreateAndCopyData(mInfo.mVideo,
mDecoder->GetImageContainer(),
pos,
frame.mTimeUs,
TimeUnit::FromMicroseconds(frame.mTimeUs),
TimeUnit::FromMicroseconds(1), // We don't know the duration yet.
b,
frame.mKeyFrame,
-1,
TimeUnit::FromMicroseconds(-1),
picture);
}

Просмотреть файл

@ -640,11 +640,11 @@ ChromiumCDMParent::RecvDecoded(const CDMVideoFrame& aFrame)
mVideoInfo,
mImageContainer,
mLastStreamOffset,
aFrame.mTimestamp(),
media::TimeUnit::FromMicroseconds(aFrame.mTimestamp()),
media::TimeUnit::FromMicroseconds(aFrame.mDuration()),
b,
false,
-1,
media::TimeUnit::FromMicroseconds(-1),
pictureRegion);
// Return the shmem to the CDM so the shmem can be reused to send us

Просмотреть файл

@ -48,11 +48,11 @@ VideoDecoderChild::RecvOutput(const VideoDataIPDL& aData)
RefPtr<VideoData> video = VideoData::CreateFromImage(
aData.display(),
aData.base().offset(),
aData.base().time(),
media::TimeUnit::FromMicroseconds(aData.base().time()),
media::TimeUnit::FromMicroseconds(aData.base().duration()),
image,
aData.base().keyframe(),
aData.base().timecode());
media::TimeUnit::FromMicroseconds(aData.base().timecode()));
mDecodedData.AppendElement(Move(video));
return IPC_OK();

Просмотреть файл

@ -494,16 +494,16 @@ AudioSink::CreateAudioFromBuffer(AlignedAudioBuffer&& aBuffer,
if (!frames) {
return nullptr;
}
CheckedInt64 duration = FramesToUsecs(frames, mOutputRate);
if (!duration.isValid()) {
auto duration = FramesToTimeUnit(frames, mOutputRate);
if (!duration.IsValid()) {
NS_WARNING("Int overflow in AudioSink");
mErrored = true;
return nullptr;
}
RefPtr<AudioData> data =
new AudioData(aReference->mOffset,
aReference->mTime.ToMicroseconds(),
duration.value(),
aReference->mTime,
duration,
frames,
Move(aBuffer),
mOutputChannels,

Просмотреть файл

@ -75,11 +75,11 @@ BlankVideoDataCreator::Create(MediaRawData* aSample)
return VideoData::CreateAndCopyData(mInfo,
mImageContainer,
aSample->mOffset,
aSample->mTime.ToMicroseconds(),
aSample->mTime,
aSample->mDuration,
buffer,
aSample->mKeyframe,
aSample->mTime.ToMicroseconds(),
aSample->mTime,
mPicture);
}
@ -116,8 +116,8 @@ BlankAudioDataCreator::Create(MediaRawData* aSample)
mFrameSum++;
}
RefPtr<AudioData> data(new AudioData(aSample->mOffset,
aSample->mTime.ToMicroseconds(),
aSample->mDuration.ToMicroseconds(),
aSample->mTime,
aSample->mDuration,
uint32_t(frames.value()),
Move(samples),
mChannelCount,

Просмотреть файл

@ -17,10 +17,10 @@ public:
// Create a dummy VideoData with no image. This gives us something to
// send to media streams if necessary.
RefPtr<VideoData> v(new VideoData(aSample->mOffset,
aSample->mTime.ToMicroseconds(),
aSample->mDuration.ToMicroseconds(),
aSample->mTime,
aSample->mDuration,
aSample->mKeyframe,
aSample->mTimecode.ToMicroseconds(),
aSample->mTimecode,
gfx::IntSize(),
0));
return v.forget();

Просмотреть файл

@ -232,7 +232,7 @@ OpusDataDecoder::ProcessDecode(MediaRawData* aSample)
__func__);
}
NS_ASSERTION(ret == frames, "Opus decoded too few audio samples");
CheckedInt64 startTime = aSample->mTime.ToMicroseconds();
auto startTime = aSample->mTime;
// Trim the initial frames while the decoder is settling.
if (mSkip > 0) {
@ -243,7 +243,7 @@ OpusDataDecoder::ProcessDecode(MediaRawData* aSample)
PodMove(buffer.get(),
buffer.get() + skipFrames * channels,
keepFrames * channels);
startTime = startTime + FramesToUsecs(skipFrames, mOpusParser->mRate);
startTime = startTime + FramesToTimeUnit(skipFrames, mOpusParser->mRate);
frames = keepFrames;
mSkip -= skipFrames;
}
@ -287,17 +287,17 @@ OpusDataDecoder::ProcessDecode(MediaRawData* aSample)
}
#endif
CheckedInt64 duration = FramesToUsecs(frames, mOpusParser->mRate);
if (!duration.isValid()) {
auto duration = FramesToTimeUnit(frames, mOpusParser->mRate);
if (!duration.IsValid()) {
return DecodePromise::CreateAndReject(
MediaResult(NS_ERROR_DOM_MEDIA_OVERFLOW_ERR,
RESULT_DETAIL("Overflow converting WebM audio duration")),
__func__);
}
CheckedInt64 time = startTime -
FramesToUsecs(mOpusParser->mPreSkip, mOpusParser->mRate) +
FramesToUsecs(mFrames, mOpusParser->mRate);
if (!time.isValid()) {
auto time = startTime -
FramesToTimeUnit(mOpusParser->mPreSkip, mOpusParser->mRate) +
FramesToTimeUnit(mFrames, mOpusParser->mRate);
if (!time.IsValid()) {
return DecodePromise::CreateAndReject(
MediaResult(NS_ERROR_DOM_MEDIA_OVERFLOW_ERR,
RESULT_DETAIL("Overflow shifting tstamp by codec delay")),
@ -308,7 +308,7 @@ OpusDataDecoder::ProcessDecode(MediaRawData* aSample)
mFrames += frames;
return DecodePromise::CreateAndResolve(
DecodedData{ new AudioData(aSample->mOffset, time.value(), duration.value(),
DecodedData{ new AudioData(aSample->mOffset, time, duration,
frames, Move(buffer), mOpusParser->mChannels,
mOpusParser->mRate) },
__func__);

Просмотреть файл

@ -172,11 +172,11 @@ TheoraDecoder::ProcessDecode(MediaRawData* aSample)
VideoData::CreateAndCopyData(info,
mImageContainer,
aSample->mOffset,
aSample->mTime.ToMicroseconds(),
aSample->mTime,
aSample->mDuration,
b,
aSample->mKeyframe,
aSample->mTimecode.ToMicroseconds(),
aSample->mTimecode,
mInfo.ScaledImageRect(mTheoraInfo.frame_width,
mTheoraInfo.frame_height));
if (!v) {

Просмотреть файл

@ -207,11 +207,11 @@ VPXDecoder::ProcessDecode(MediaRawData* aSample)
v = VideoData::CreateAndCopyData(mInfo,
mImageContainer,
aSample->mOffset,
aSample->mTime.ToMicroseconds(),
aSample->mTime,
aSample->mDuration,
b,
aSample->mKeyframe,
aSample->mTimecode.ToMicroseconds(),
aSample->mTimecode,
mInfo.ScaledImageRect(img->d_w,
img->d_h));
} else {
@ -224,12 +224,12 @@ VPXDecoder::ProcessDecode(MediaRawData* aSample)
v = VideoData::CreateAndCopyData(mInfo,
mImageContainer,
aSample->mOffset,
aSample->mTime.ToMicroseconds(),
aSample->mTime,
aSample->mDuration,
b,
alpha_plane,
aSample->mKeyframe,
aSample->mTimecode.ToMicroseconds(),
aSample->mTimecode,
mInfo.ScaledImageRect(img->d_w,
img->d_h));

Просмотреть файл

@ -141,7 +141,7 @@ VorbisDataDecoder::ProcessDecode(MediaRawData* aSample)
const unsigned char* aData = aSample->Data();
size_t aLength = aSample->Size();
int64_t aOffset = aSample->mOffset;
int64_t aTstampUsecs = aSample->mTime.ToMicroseconds();
auto aTstampUsecs = aSample->mTime;
int64_t aTotalFrames = 0;
MOZ_ASSERT(mPacketCount >= 3);
@ -195,23 +195,23 @@ VorbisDataDecoder::ProcessDecode(MediaRawData* aSample)
}
}
CheckedInt64 duration = FramesToUsecs(frames, rate);
if (!duration.isValid()) {
auto duration = FramesToTimeUnit(frames, rate);
if (!duration.IsValid()) {
return DecodePromise::CreateAndReject(
MediaResult(NS_ERROR_DOM_MEDIA_OVERFLOW_ERR,
RESULT_DETAIL("Overflow converting audio duration")),
__func__);
}
CheckedInt64 total_duration = FramesToUsecs(mFrames, rate);
if (!total_duration.isValid()) {
auto total_duration = FramesToTimeUnit(mFrames, rate);
if (!total_duration.IsValid()) {
return DecodePromise::CreateAndReject(
MediaResult(NS_ERROR_DOM_MEDIA_OVERFLOW_ERR,
RESULT_DETAIL("Overflow converting audio total_duration")),
__func__);
}
CheckedInt64 time = total_duration + aTstampUsecs;
if (!time.isValid()) {
auto time = total_duration + aTstampUsecs;
if (!time.IsValid()) {
return DecodePromise::CreateAndReject(
MediaResult(
NS_ERROR_DOM_MEDIA_OVERFLOW_ERR,
@ -237,7 +237,7 @@ VorbisDataDecoder::ProcessDecode(MediaRawData* aSample)
aTotalFrames += frames;
results.AppendElement(new AudioData(aOffset, time.value(), duration.value(),
results.AppendElement(new AudioData(aOffset, time, duration,
frames, data.Forget(), channels, rate));
mFrames += frames;
err = vorbis_synthesis_read(&mVorbisDsp, frames);

Просмотреть файл

@ -79,7 +79,6 @@ WaveDataDecoder::ProcessDecode(MediaRawData* aSample)
size_t aLength = aSample->Size();
ByteReader aReader(aSample->Data(), aLength);
int64_t aOffset = aSample->mOffset;
uint64_t aTstampUsecs = aSample->mTime.ToMicroseconds();
int32_t frames = aLength * 8 / mInfo.mBitDepth / mInfo.mChannels;
@ -118,10 +117,10 @@ WaveDataDecoder::ProcessDecode(MediaRawData* aSample)
}
}
int64_t duration = frames / mInfo.mRate;
auto duration = media::TimeUnit::FromMicroseconds(frames / mInfo.mRate);
return DecodePromise::CreateAndResolve(
DecodedData{ new AudioData(aOffset, aTstampUsecs, duration, frames,
DecodedData{ new AudioData(aOffset, aSample->mTime, duration, frames,
Move(buffer), mInfo.mChannels, mInfo.mRate) },
__func__);
}

Просмотреть файл

@ -66,11 +66,11 @@ GMPVideoDecoder::Decoded(GMPVideoi420Frame* aDecodedFrame)
mConfig,
mImageContainer,
mLastStreamOffset,
decodedFrame->Timestamp(),
media::TimeUnit::FromMicroseconds(decodedFrame->Timestamp()),
media::TimeUnit::FromMicroseconds(decodedFrame->Duration()),
b,
false,
-1,
media::TimeUnit::FromMicroseconds(-1),
pictureRegion);
RefPtr<GMPVideoDecoder> self = this;
if (v) {

Просмотреть файл

@ -138,10 +138,11 @@ public:
gl::OriginPos::BottomLeft);
RefPtr<VideoData> v = VideoData::CreateFromImage(
inputInfo.mDisplaySize, offset, presentationTimeUs,
inputInfo.mDisplaySize, offset,
TimeUnit::FromMicroseconds(presentationTimeUs),
TimeUnit::FromMicroseconds(inputInfo.mDurationUs),
img, !!(flags & MediaCodec::BUFFER_FLAG_SYNC_FRAME),
presentationTimeUs);
TimeUnit::FromMicroseconds(presentationTimeUs));
v->SetListener(Move(releaseSample));
mDecoder->UpdateOutputStatus(v);
@ -344,8 +345,8 @@ private:
aSample->WriteToByteBuffer(dest);
RefPtr<AudioData> data = new AudioData(
0, presentationTimeUs,
FramesToUsecs(numFrames, mOutputSampleRate).value(), numFrames,
0, TimeUnit::FromMicroseconds(presentationTimeUs),
FramesToTimeUnit(numFrames, mOutputSampleRate), numFrames,
Move(audio), mOutputChannels, mOutputSampleRate);
mDecoder->UpdateOutputStatus(data);

Просмотреть файл

@ -323,8 +323,8 @@ AppleATDecoder::DecodeSample(MediaRawData* aSample)
}
RefPtr<AudioData> audio = new AudioData(aSample->mOffset,
aSample->mTime.ToMicroseconds(),
duration.ToMicroseconds(),
aSample->mTime,
duration,
numFrames,
data.Forget(),
channels,

Просмотреть файл

@ -356,8 +356,8 @@ AppleVTDecoder::OutputFrame(CVPixelBufferRef aImage,
if (useNullSample) {
data = new NullData(aFrameRef.byte_offset,
aFrameRef.composition_timestamp.ToMicroseconds(),
aFrameRef.duration.ToMicroseconds());
aFrameRef.composition_timestamp,
aFrameRef.duration);
} else if (mUseSoftwareImages) {
size_t width = CVPixelBufferGetWidth(aImage);
size_t height = CVPixelBufferGetHeight(aImage);
@ -412,11 +412,11 @@ AppleVTDecoder::OutputFrame(CVPixelBufferRef aImage,
VideoData::CreateAndCopyData(info,
mImageContainer,
aFrameRef.byte_offset,
aFrameRef.composition_timestamp.ToMicroseconds(),
aFrameRef.composition_timestamp,
aFrameRef.duration,
buffer,
aFrameRef.is_sync_point,
aFrameRef.decode_timestamp.ToMicroseconds(),
aFrameRef.decode_timestamp,
visible);
// Unlock the returned image data.
CVPixelBufferUnlockBaseAddress(aImage, kCVPixelBufferLock_ReadOnly);
@ -432,11 +432,11 @@ AppleVTDecoder::OutputFrame(CVPixelBufferRef aImage,
data =
VideoData::CreateFromImage(info.mDisplay,
aFrameRef.byte_offset,
aFrameRef.composition_timestamp.ToMicroseconds(),
aFrameRef.composition_timestamp,
aFrameRef.duration,
image.forget(),
aFrameRef.is_sync_point,
aFrameRef.decode_timestamp.ToMicroseconds());
aFrameRef.decode_timestamp);
#else
MOZ_ASSERT_UNREACHABLE("No MacIOSurface on iOS");
#endif

Просмотреть файл

@ -204,7 +204,7 @@ FFmpegAudioDecoder<LIBAV_VER>::ProcessDecode(MediaRawData* aSample)
}
results.AppendElement(new AudioData(
samplePosition, pts.ToMicroseconds(), duration.ToMicroseconds(),
samplePosition, pts, duration,
mFrame->nb_samples, Move(audio), numChannels, samplingRate));
pts = newpts;

Просмотреть файл

@ -343,11 +343,11 @@ FFmpegVideoDecoder<LIBAV_VER>::DoDecode(MediaRawData* aSample,
VideoData::CreateAndCopyData(mInfo,
mImageContainer,
aSample->mOffset,
pts,
TimeUnit::FromMicroseconds(pts),
TimeUnit::FromMicroseconds(duration),
b,
!!mFrame->key_frame,
-1,
TimeUnit::FromMicroseconds(-1),
mInfo.ScaledImageRect(mFrame->width,
mFrame->height));

Просмотреть файл

@ -994,11 +994,11 @@ MediaDataHelper::CreateYUV420VideoData(BufferData* aBufferData)
VideoData::CreateAndCopyData(info,
mImageContainer,
0, // Filled later by caller.
0, // Filled later by caller.
media::TimeUnit::Zero(), // Filled later by caller.
media::TimeUnit::FromMicroseconds(1), // We don't know the duration.
b,
0, // Filled later by caller.
-1,
media::TimeUnit::FromMicroseconds(-1),
info.ImageRect());
LOG("YUV420 VideoData: disp width %d, height %d, pic width %d, height %d, time %lld",

Просмотреть файл

@ -336,8 +336,8 @@ WMFAudioMFTManager::Output(int64_t aStreamOffset,
NS_ENSURE_TRUE(duration.IsValid(), E_FAIL);
aOutData = new AudioData(aStreamOffset,
timestamp.ToMicroseconds(),
duration.ToMicroseconds(),
timestamp,
duration,
numFrames,
Move(audioData),
mAudioChannels,

Просмотреть файл

@ -839,11 +839,11 @@ WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample,
VideoData::CreateAndCopyData(mVideoInfo,
mImageContainer,
aStreamOffset,
pts.ToMicroseconds(),
pts,
duration,
b,
false,
-1,
TimeUnit::FromMicroseconds(-1),
pictureRegion);
if (twoDBuffer) {
twoDBuffer->Unlock2D();
@ -866,11 +866,11 @@ WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample,
RefPtr<VideoData> v =
VideoData::CreateFromImage(mVideoInfo.mDisplay,
aStreamOffset,
pts.ToMicroseconds(),
pts,
duration,
image.forget(),
false,
-1);
TimeUnit::FromMicroseconds(-1));
v.forget(aOutVideoData);
return S_OK;
@ -904,11 +904,11 @@ WMFVideoMFTManager::CreateD3DVideoFrame(IMFSample* aSample,
NS_ENSURE_TRUE(duration.IsValid(), E_FAIL);
RefPtr<VideoData> v = VideoData::CreateFromImage(mVideoInfo.mDisplay,
aStreamOffset,
pts.ToMicroseconds(),
pts,
duration,
image.forget(),
false,
-1);
TimeUnit::FromMicroseconds(-1));
NS_ENSURE_TRUE(v, E_FAIL);
v.forget(aOutVideoData);

Просмотреть файл

@ -98,7 +98,7 @@ HTTP load media-element-source-seek-1.html
load offline-buffer-source-ended-1.html
load oscillator-ended-1.html
load oscillator-ended-2.html
skip-if(Android) load video-replay-after-audio-end.html # bug 1339449
load video-replay-after-audio-end.html
# This needs to run at the end to avoid leaking busted state into other tests.
load 691096-1.html
load 1236639.html

Просмотреть файл

@ -159,7 +159,7 @@ function MaybeCrossOriginURI(test, uri)
}
}
function AppendTrack(test, ms, track, token, loadParams)
function AppendTrack(test, ms, track, token)
{
return new Promise(function(resolve, reject) {
var sb;
@ -168,10 +168,6 @@ function AppendTrack(test, ms, track, token, loadParams)
var fragments = track.fragments;
var fragmentFile;
if (loadParams && loadParams.onlyLoadFirstFragments) {
fragments = fragments.slice(0, loadParams.onlyLoadFirstFragments);
}
function addNextFragment() {
if (curFragment >= fragments.length) {
Log(token, track.name + ": end of track");
@ -230,7 +226,7 @@ function AppendTrack(test, ms, track, token, loadParams)
// Returns a promise that is resolved when the media element is ready to have
// its play() function called, i.e. once it has loaded the MSE fragments.
function LoadTest(test, elem, token, loadParams)
function LoadTest(test, elem, token)
{
if (!test.tracks) {
ok(false, token + " test does not have a tracks list");
@ -239,19 +235,16 @@ function LoadTest(test, elem, token, loadParams)
var ms = new MediaSource();
elem.src = URL.createObjectURL(ms);
elem.crossOrigin = test.crossOrigin || false;
return new Promise(function (resolve, reject) {
ms.addEventListener("sourceopen", function () {
Log(token, "sourceopen");
Promise.all(test.tracks.map(function(track) {
return AppendTrack(test, ms, track, token, loadParams);
return AppendTrack(test, ms, track, token);
})).then(function() {
if (loadParams && loadParams.noEndOfStream) {
Log(token, "Tracks loaded");
} else {
Log(token, "Tracks loaded, calling MediaSource.endOfStream()");
ms.endOfStream();
}
Log(token, "Tracks loaded, calling MediaSource.endOfStream()");
ms.endOfStream();
resolve();
}).catch(reject);
}, {once: true});
@ -266,192 +259,160 @@ function EMEPromise() {
});
}
// Finish |token| when all promises are resolved or any one promise is
// rejected. It also cleans up the media element to release resources.
function EMEPromiseAll(v, token, promises) {
Promise.all(promises).then(values => {
removeNodeAndSource(v);
manager.finished(token);
}, reason => {
ok(false, TimeStamp(token) + " - Error during load: " + reason);
removeNodeAndSource(v);
manager.finished(token);
});
}
function SetupEME(test, token, params)
{
var v = document.createElement("video");
v.crossOrigin = test.crossOrigin || false;
v.sessions = [];
v.closeSessions = function() {
return Promise.all(v.sessions.map(s => s.close().then(() => s.closed))).then(
() => {
v.setMediaKeys(null);
if (v.parentNode) {
v.remove();
}
v.onerror = null;
v.src = null;
});
};
// Log events dispatched to make debugging easier...
[ "canplay", "canplaythrough", "ended", "error", "loadeddata",
"loadedmetadata", "loadstart", "pause", "play", "playing", "progress",
"stalled", "suspend", "waiting", "waitingforkey",
].forEach(function (e) {
v.addEventListener(e, function(event) {
Log(token, "" + e);
});
});
// Finish the test when error is encountered.
v.onerror = bail(token + " got error event");
var onSetKeysFail = (params && params.onSetKeysFail)
? params.onSetKeysFail
: bail(token + " Failed to set MediaKeys on <video> element");
// null: No session management in progress, just go ahead and update the session.
// [...]: Session management in progress, add {initDataType, initData} to
// this queue to get it processed when possible.
var initDataQueue = [];
function pushInitData(ev)
{
if (initDataQueue === null) {
initDataQueue = [];
}
initDataQueue.push(ev);
if (params && params.onInitDataQueued) {
params.onInitDataQueued(ev, ev.initDataType, StringToHex(ArrayBufferToString(ev.initData)));
}
}
function processInitDataQueue()
{
function maybeResolveInitDataPromise() {
if (params && params.initDataPromise) {
params.initDataPromise.resolve();
}
}
if (initDataQueue === null) {
maybeResolveInitDataPromise();
return;
}
// If we've processed all our init data, null the queue to indicate the encrypted event has been handled.
if (initDataQueue.length === 0) {
initDataQueue = null;
maybeResolveInitDataPromise();
return;
}
var ev = initDataQueue.shift();
var sessionType = (params && params.sessionType) ? params.sessionType : "temporary";
Log(token, "createSession(" + sessionType + ") for (" + ev.initDataType + ", " + StringToHex(ArrayBufferToString(ev.initData)) + ")");
var session = v.mediaKeys.createSession(sessionType);
if (params && params.onsessioncreated) {
params.onsessioncreated(session);
}
v.sessions.push(session);
return new Promise(function (resolve, reject) {
session.addEventListener("message", UpdateSessionFunc(test, token, sessionType, resolve, reject));
Log(token, "session[" + session.sessionId + "].generateRequest(" + ev.initDataType + ", " + StringToHex(ArrayBufferToString(ev.initData)) + ")");
session.generateRequest(ev.initDataType, ev.initData).catch(function(reason) {
// Reject the promise if generateRequest() failed. Otherwise it will
// be resolved in UpdateSessionFunc().
bail(token + ": session[" + session.sessionId + "].generateRequest(" + ev.initDataType + ", " + StringToHex(ArrayBufferToString(ev.initData)) + ") failed")(reason);
reject();
});
})
.then(function(aSession) {
Log(token, "session[" + session.sessionId + "].generateRequest(" + ev.initDataType + ", " + StringToHex(ArrayBufferToString(ev.initData)) + ") succeeded");
if (params && params.onsessionupdated) {
params.onsessionupdated(aSession);
}
processInitDataQueue();
});
}
/*
* Create a new MediaKeys object.
* Return a promise which will be resolved with a new MediaKeys object,
* or will be rejected with a string that describes the failure.
*/
function CreateMediaKeys(v, test, token) {
let p = new EMEPromise;
function streamType(type) {
var x = test.tracks.find(o => o.name == type);
return x ? x.type : undefined;
}
// If sessions are to be delayed, we won't perform any processing until the
// callback assigned here is called by the test.
if (params && params.delaySessions) {
params.ProcessSessions = processInitDataQueue;
function onencrypted(ev) {
var options = { initDataTypes: [ev.initDataType] };
if (streamType("video")) {
options.videoCapabilities = [{contentType: streamType("video")}];
}
if (streamType("audio")) {
options.audioCapabilities = [{contentType: streamType("audio")}];
}
navigator.requestMediaKeySystemAccess(CLEARKEY_KEYSYSTEM, [options])
.then(keySystemAccess => {
keySystemAccess.createMediaKeys().then(
p.resolve,
() => p.reject(`${token} Failed to create MediaKeys object.`)
);
}, () => p.reject(`${token} Failed to request key system access.`));
}
// Is this the first piece of init data we're processing?
var firstInitData = true;
v.addEventListener("encrypted", function(ev) {
if (firstInitData) {
Log(token, "got first encrypted(" + ev.initDataType + ", " + StringToHex(ArrayBufferToString(ev.initData)) + "), setup session");
firstInitData = false;
pushInitData(ev);
v.addEventListener("encrypted", onencrypted, {once: true});
return p.promise;
}
function chain(promise, onReject) {
return promise.then(function(value) {
return Promise.resolve(value);
}).catch(function(reason) {
onReject(reason);
return Promise.reject();
})
}
/*
* Create a new MediaKeys object and provide it to the media element.
* Return a promise which will be resolved if succeeded, or will be rejected
* with a string that describes the failure.
*/
function CreateAndSetMediaKeys(v, test, token) {
let p = new EMEPromise;
var options = { initDataTypes: [ev.initDataType] };
if (streamType("video")) {
options.videoCapabilities = [{contentType: streamType("video")}];
}
if (streamType("audio")) {
options.audioCapabilities = [{contentType: streamType("audio")}];
}
CreateMediaKeys(v, test, token).then(mediaKeys => {
v.setMediaKeys(mediaKeys).then(
p.resolve,
() => p.reject(`${token} Failed to set MediaKeys on <video> element.`)
);
}, p.reject)
var p = navigator.requestMediaKeySystemAccess(CLEARKEY_KEYSYSTEM, [options]);
var r = bail(token + " Failed to request key system access.");
chain(p, r)
.then(function(keySystemAccess) {
var p = keySystemAccess.createMediaKeys();
var r = bail(token + " Failed to create MediaKeys object");
return chain(p, r);
})
return p.promise;
}
.then(function(mediaKeys) {
Log(token, "created MediaKeys object ok");
mediaKeys.sessions = [];
var p = v.setMediaKeys(mediaKeys);
return chain(p, onSetKeysFail);
})
/*
* Collect the init data from 'encrypted' events.
* Return a promise which will be resolved with the init data when collection
* is completed (specified by test.sessionCount).
*/
function LoadInitData(v, test, token) {
let p = new EMEPromise;
let initDataQueue = [];
.then(function() {
Log(token, "set MediaKeys on <video> element ok");
if (params && params.onMediaKeysSet) {
params.onMediaKeysSet();
}
if (!(params && params.delaySessions)) {
processInitDataQueue();
}
})
} else {
if (params && params.delaySessions) {
Log(token, "got encrypted(" + ev.initDataType + ", " + StringToHex(ArrayBufferToString(ev.initData)) + ") event, queue it in because we're delaying sessions");
pushInitData(ev);
} else if (initDataQueue !== null) {
Log(token, "got encrypted(" + ev.initDataType + ", " + StringToHex(ArrayBufferToString(ev.initData)) + ") event, queue it for later session update");
pushInitData(ev);
} else {
Log(token, "got encrypted(" + ev.initDataType + ", " + StringToHex(ArrayBufferToString(ev.initData)) + ") event, update session now");
pushInitData(ev);
processInitDataQueue();
}
function onencrypted(ev) {
initDataQueue.push(ev);
Log(token, `got encrypted(${ev.initDataType}, ` +
`${StringToHex(ArrayBufferToString(ev.initData))}) event.`);
if (test.sessionCount == initDataQueue.length) {
p.resolve(initDataQueue);
}
}
v.addEventListener("encrypted", onencrypted);
return p.promise;
}
/*
* Generate a license request and update the session.
* Return a promise which will be resolved with the updated session
* or rejected with a string that describes the failure.
*/
function MakeRequest(test, token, ev, session, sessionType) {
sessionType = sessionType || "temporary";
let p = new EMEPromise;
let str = `session[${session.sessionId}].generateRequest(` +
`${ev.initDataType}, ${StringToHex(ArrayBufferToString(ev.initData))})`;
session.addEventListener("message",
UpdateSessionFunc(test, token, sessionType, p.resolve, p.reject));
Log(token, str);
session.generateRequest(ev.initDataType, ev.initData)
.catch(reason => {
// Reject the promise if generateRequest() failed.
// Otherwise it will be resolved in UpdateSessionFunc().
p.reject(`${token}: ${str} failed; ${reason}`);
});
return v;
return p.promise;
}
/*
* Process the init data by calling MakeRequest().
* Return a promise which will be resolved with the updated sessions
* when all init data are processed, or rejected on any failure.
*/
function ProcessInitData(v, test, token, initData, sessionType) {
return Promise.all(
initData.map(ev => {
let session = v.mediaKeys.createSession(sessionType);
return MakeRequest(test, token, ev, session, sessionType);
})
);
}
/*
* Clean up the |v| element.
*/
function CleanUpMedia(v) {
v.setMediaKeys(null);
v.remove();
v.onerror = null;
v.src = null;
}
/*
* Close all sessions and clean up the |v| element.
*/
function CloseSessions(v, sessions) {
return Promise.all(sessions.map(s => s.close()))
.then(CleanUpMedia(v));
}
/*
* Set up media keys and source buffers for the media element.
* Return a promise resolved when all key sessions are updated, or rejected
* on any failure.
*/
function SetupEME(v, test, token) {
let p = new EMEPromise;
v.onerror = function() {
p.reject(`${token} got an error event.`);
}
Promise.all([
LoadInitData(v, test, token),
CreateAndSetMediaKeys(v, test, token),
LoadTest(test, v, token)])
.then(values => {
let initData = values[0];
return ProcessInitData(v, test, token, initData);
})
.then(p.resolve, p.reject);
return p.promise;
}
function SetupEMEPref(callback) {

Просмотреть файл

@ -16,11 +16,7 @@ function startTest(test, token)
{
manager.started(token);
var sessions = [];
// Will be resolved when all initData are processed.
let initDataPromise = new EMEPromise;
var v = SetupEME(test, token, { initDataPromise: initDataPromise });
let v = document.createElement("video");
v.preload = "auto"; // Required due to "canplay" not firing for MSE unless we do this.
var p1 = new EMEPromise;
@ -41,8 +37,14 @@ function startTest(test, token)
p1.resolve();
});
var p2 = LoadTest(test, v, token, { onlyLoadFirstFragments:2, noEndOfStream:false });
EMEPromiseAll(v, token, [p1.promise, p2, initDataPromise.promise]);
let p2 = SetupEME(v, test, token);
Promise.all([p1.promise, p2])
.catch(reason => ok(false, reason))
.then(() => {
CleanUpMedia(v);
manager.finished(token);
});
}
function beginTest() {

Просмотреть файл

@ -78,37 +78,35 @@ function startTest(test, token)
var sessions = [];
var v = SetupEME(test, token,
{
onsessioncreated: function(session) {
sessions.push(session);
session.addEventListener("keystatuseschange", KeysChangeFunc(session, test.keys, token));
function onSessionCreated(session) {
sessions.push(session);
session.addEventListener("keystatuseschange", KeysChangeFunc(session, test.keys, token));
session.numKeystatuseschangeEvents = 0;
session.numOnkeystatuseschangeEvents = 0;
session.numKeystatuseschangeEvents = 0;
session.numOnkeystatuseschangeEvents = 0;
session.addEventListener("keystatuseschange", function() {
session.numKeystatuseschangeEvents += 1;
});
session.onkeystatuseschange = function() {
session.numOnkeystatuseschangeEvents += 1;
};
session.addEventListener("keystatuseschange", function() {
session.numKeystatuseschangeEvents += 1;
});
session.onkeystatuseschange = function() {
session.numOnkeystatuseschangeEvents += 1;
};
session.numMessageEvents = 0;
session.numOnMessageEvents = 0;
session.addEventListener("message", function() {
session.numMessageEvents += 1;
});
session.onmessage = function() {
session.numOnMessageEvents += 1;
};
}
}
);
session.numMessageEvents = 0;
session.numOnMessageEvents = 0;
session.addEventListener("message", function() {
session.numMessageEvents += 1;
});
session.onmessage = function() {
session.numOnMessageEvents += 1;
};
}
let v = document.createElement("video");
document.body.appendChild(v);
var gotEncrypted = 0;
let finish = new EMEPromise;
v.addEventListener("encrypted", function(ev) {
gotEncrypted += 1;
@@ -160,16 +158,25 @@ function startTest(test, token)
ok(keyIdsReceived[kid], TimeStamp(token) + " key with id " + kid + " was usable as expected");
}
v.closeSessions().then(() => manager.finished(token));
CloseSessions(v, sessions).then(finish.resolve, finish.reject);
});
LoadTest(test, v, token)
.then(function() {
Promise.all([
LoadInitData(v, test, token),
CreateAndSetMediaKeys(v, test, token),
LoadTest(test, v, token)])
.then(values => {
v.play();
}).catch(function() {
ok(false, token + " failed to load");
manager.finished(token);
});
let initData = values[0];
initData.map(ev => {
let session = v.mediaKeys.createSession();
onSessionCreated(session);
MakeRequest(test, token, ev, session);
});
return finish.promise;
})
.catch(reason => ok(false, reason))
.then(() => manager.finished(token));
}
function beginTest() {


@@ -22,19 +22,39 @@ function startTest(test, token)
// Case 1. setting MediaKeys on an element captured by MediaElementSource should fail.
var p1 = new EMEPromise;
var case1token = token + "_case1";
var setKeysFailed = function() {
ok(true, TimeStamp(case1token) + " setMediaKeys failed as expected.");
p1.resolve();
};
var v1 = SetupEME(test, case1token, { onSetKeysFail: setKeysFailed });
let v1 = document.createElement("video");
function setMediaKeys() {
let p = new EMEPromise;
CreateMediaKeys(v1, test, case1token)
.then(mediaKeys => {
v1.setMediaKeys(mediaKeys)
.then(() => {
p.reject(`${case1token} setMediaKeys shouldn't succeed.`);
}, () => {
ok(true, TimeStamp(case1token) + " setMediaKeys failed as expected.");
p.resolve();
})
}, p.reject);
return p.promise;
}
var context = new AudioContext();
var node = context.createMediaElementSource(v1);
v1.addEventListener("loadeddata", function(ev) {
ok(false, TimeStamp(case1token) + " should never reach loadeddata, as setMediaKeys should fail");
});
manager.started(case1token);
var p2 = LoadTest(test, v1, case1token, { onlyLoadFirstFragments:2, noEndOfStream:false });
EMEPromiseAll(v1, case1token, [p1.promise, p2]);
Promise.all([
LoadTest(test, v1, case1token),
setMediaKeys()])
.catch(reason => ok(false, reason))
.then(() => {
CleanUpMedia(v1);
manager.finished(case1token);
});
}
function beginTest() {


@@ -22,7 +22,8 @@ function startTest(test, token)
// Case 2. creating a MediaElementSource on a media element with a MediaKeys should fail.
var p1 = new EMEPromise;
var case2token = token + "_case2";
var v2 = SetupEME(test, case2token);
let v2 = document.createElement("video");
v2.addEventListener("loadeddata", function(ev) {
ok(true, case2token + " should reach loadeddata");
var threw = false;
@@ -35,9 +36,16 @@ function startTest(test, token)
ok(threw, "Should throw an error creating a MediaElementSource on an EME video.");
p1.resolve();
});
manager.started(case2token);
var p2 = LoadTest(test, v2, case2token, { onlyLoadFirstFragments:2, noEndOfStream:false });
EMEPromiseAll(v2, case2token, [p1.promise, p2]);
let p2 = SetupEME(v2, test, case2token);
Promise.all([p1.promise, p2])
.catch(reason => ok(false, reason))
.then(() => {
CleanUpMedia(v2);
manager.finished(case2token);
});
}
function beginTest() {


@@ -22,7 +22,8 @@ function startTest(test, token)
// Case 3. capturing a media element with mozCaptureStream that has a MediaKeys should fail.
var p1 = new EMEPromise;
var case3token = token + "_case3";
var v3 = SetupEME(test, case3token);
let v3 = document.createElement("video");
v3.addEventListener("loadeddata", function(ev) {
ok(true, TimeStamp(case3token) + " should reach loadeddata");
var threw = false;
@@ -34,9 +35,16 @@ function startTest(test, token)
ok(threw, TimeStamp(case3token) + " Should throw an error calling mozCaptureStreamUntilEnded an EME video.");
p1.resolve();
});
manager.started(case3token);
var p2 = LoadTest(test, v3, case3token, { onlyLoadFirstFragments:2, noEndOfStream:false });
EMEPromiseAll(v3, case3token, [p1.promise, p2]);
let p2 = SetupEME(v3, test, case3token);
Promise.all([p1.promise, p2])
.catch(reason => ok(false, reason))
.then(() => {
CleanUpMedia(v3);
manager.finished(case3token);
});
}
function beginTest() {


@@ -14,60 +14,18 @@ var manager = new MediaTestManager;
function startTest(test, token)
{
// Test if the appropriate preconditions are met such that we can start
// processing delayed sessions.
function TestIfDoneDelaying()
{
var got = "Got:";
if (loaded) { got += " loaded,"; }
got += " " + gotEncrypted + "/" + test.sessionCount + " sessions,";
got += " " + gotWaitingForKey + " waiting for key events"
if (loaded && gotEncrypted == test.sessionCount && gotWaitingForKey > 0) {
Log(token, got + " -> Update sessions with keys");
params.ProcessSessions();
} else {
Log(token, got + " -> Wait for more...");
}
}
manager.started(token);
var updatedSessionsCount = 0;
var loaded = false;
var params = {
// params will be populated with a ProcessSessions() callback, that can be
// called to process delayed sessions.
delaySessions: true,
// Function to be called once we start processing and updating sessions.
// This should only be called once the preconditions in TestIfDoneDelaying
// are met.
onsessionupdated: function(session) {
updatedSessionsCount += 1;
if (updatedSessionsCount == test.sessionCount) {
info(TimeStamp(token) + " Updated all sessions, loading complete -> Play");
v.play();
} else {
info(TimeStamp(token) + " Updated " + updatedSessionsCount + "/" + test.sessionCount + " sessions so far");
}
},
};
var v = SetupEME(test, token, params);
let v = document.createElement("video");
document.body.appendChild(v);
var gotEncrypted = 0;
var gotWaitingForKey = 0;
var gotOnwaitingforkey = 0;
v.addEventListener("encrypted", function() {
gotEncrypted += 1;
TestIfDoneDelaying();
});
let waitForKey = new EMEPromise;
v.addEventListener("waitingforkey", function() {
gotWaitingForKey += 1;
TestIfDoneDelaying()
waitForKey.resolve();
});
v.onwaitingforkey = function() {
@@ -80,6 +38,7 @@ function startTest(test, token)
is(v.isEncrypted, undefined, "isEncrypted should not be accessible from content");
});
let finish = new EMEPromise;
v.addEventListener("ended", function() {
ok(true, TimeStamp(token) + " got ended event");
// We expect only one waitingForKey as we delay until all sessions are ready.
@@ -88,17 +47,26 @@ function startTest(test, token)
ok(gotWaitingForKey == 1, "Expected number 1 wait, got: " + gotWaitingForKey);
ok(gotOnwaitingforkey == gotWaitingForKey, "Should have as many event listener calls as event handler calls, got: " + gotOnwaitingforkey);
v.closeSessions().then(() => manager.finished(token));
finish.resolve();
});
LoadTest(test, v, token)
.then(function() {
loaded = true;
TestIfDoneDelaying();
}).catch(function() {
ok(false, token + " failed to load");
manager.finished(token);
});
Promise.all([
LoadInitData(v, test, token),
CreateAndSetMediaKeys(v, test, token),
LoadTest(test, v, token),
waitForKey.promise])
.then(values => {
let initData = values[0];
return ProcessInitData(v, test, token, initData);
})
.then(sessions => {
Log(token, "Updated all sessions, loading complete -> Play");
v.play();
finish.promise.then(() => CloseSessions(v, sessions));
return finish.promise;
})
.catch(reason => ok(false, reason))
.then(() => manager.finished(token));
}
function beginTest() {


@@ -1066,21 +1066,6 @@ AudioContext::Unmute() const
}
}
AudioChannel
AudioContext::MozAudioChannelType() const
{
return mDestination->MozAudioChannelType();
}
AudioChannel
AudioContext::TestAudioChannelInAudioNodeStream()
{
MediaStream* stream = mDestination->Stream();
MOZ_ASSERT(stream);
return stream->AudioChannelType();
}
size_t
AudioContext::SizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const
{


@@ -312,10 +312,6 @@ public:
JSObject* GetGlobalJSObject() const;
AudioChannel MozAudioChannelType() const;
AudioChannel TestAudioChannelInAudioNodeStream();
void RegisterNode(AudioNode* aNode);
void UnregisterNode(AudioNode* aNode);
@@ -323,9 +319,6 @@ public:
BasicWaveFormCache* GetBasicWaveFormCache();
IMPL_EVENT_HANDLER(mozinterruptbegin)
IMPL_EVENT_HANDLER(mozinterruptend)
bool CheckClosed(ErrorResult& aRv);
void Dispatch(already_AddRefed<nsIRunnable>&& aRunnable);


@@ -545,9 +545,6 @@ AudioDestinationNode::WindowSuspendChanged(nsSuspendedTypes aSuspend)
"this = %p, aSuspend = %s\n", this, SuspendTypeToStr(aSuspend)));
mAudioChannelSuspended = suspended;
Context()->DispatchTrustedEvent(!suspended ?
NS_LITERAL_STRING("mozinterruptend") :
NS_LITERAL_STRING("mozinterruptbegin"));
DisabledTrackMode disabledMode = suspended ? DisabledTrackMode::SILENCE_BLACK
: DisabledTrackMode::ENABLED;


@@ -171,9 +171,6 @@ tags=capturestream
tags=capturestream
[test_mixingRules.html]
skip-if = toolkit == 'android' # bug 1091965
[test_mozaudiochannel.html]
# Android: bug 1061675; OSX 10.6: bug 1097721
skip-if = (toolkit == 'android') || (os == 'mac' && os_version == '10.6')
[test_nodeToParamConnection.html]
[test_nodeCreationDocumentGone.html]
[test_OfflineAudioContext.html]


@@ -1,151 +0,0 @@
<!DOCTYPE HTML>
<html>
<head>
<title>Test for mozaudiochannel</title>
<script type="application/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<script type="text/javascript" src="/tests/SimpleTest/EventUtils.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
</head>
<body>
<p id="display"></p>
<pre id="test">
<script type="application/javascript">
function test_basic() {
var ac = new AudioContext();
ok(ac, "AudioContext created");
// Default
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
// Unpermitted channels
ac = new AudioContext("content");
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
ac = new AudioContext("notification");
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
ac = new AudioContext("alarm");
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
ac = new AudioContext("telephony");
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
ac = new AudioContext("ringer");
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
ac = new AudioContext("publicnotification");
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
runTest();
}
function test_permission(aChannel) {
var ac = new AudioContext();
ok(ac, "AudioContext created");
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
var channel = SpecialPowers.wrap(ac).testAudioChannelInAudioNodeStream();
is(channel, "normal", "AudioNodeStream is using the correct default audio channel.");
SpecialPowers.pushPermissions(
[{ "type": "audio-channel-" + aChannel, "allow": true, "context": document }],
function() {
var ac = new AudioContext(aChannel);
is(ac.mozAudioChannelType, aChannel, "Default ac channel == '" + aChannel + "'");
var channel = SpecialPowers.wrap(ac).testAudioChannelInAudioNodeStream();
is(channel, aChannel, "AudioNodeStream is using the correct new audio channel.");
runTest();
}
);
}
function test_preferences(aChannel) {
SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", aChannel ]]},
function() {
SpecialPowers.pushPermissions(
[{ "type": "audio-channel-" + aChannel, "allow": false, "context": document }],
function() {
var ac = new AudioContext(aChannel);
ok(ac, "AudioContext created");
is(ac.mozAudioChannelType, aChannel, "Default ac channel == '" + aChannel + "'");
var channel = SpecialPowers.wrap(ac).testAudioChannelInAudioNodeStream();
is(channel, aChannel, "AudioNodeStream is using the correct audio channel.");
runTest();
}
);
}
);
}
function test_wrong_preferences() {
SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", 'foobar' ]]},
function() {
var ac = new AudioContext();
ok(ac, "AudioContext created");
is(ac.mozAudioChannelType, 'normal', "Default ac channel == 'normal'");
runTest();
}
);
}
function test_testAudioChannelInAudioNodeStream() {
var ac = new AudioContext();
ok(ac, "AudioContext created");
var status = false;
try {
ac.testAudioChannelInAudioNodeStream();
} catch(e) {
status = true;
}
ok(status, "testAudioChannelInAudioNodeStream() should not exist in content.");
runTest();
}
var tests = [
test_basic,
function() { test_permission("content"); },
function() { test_permission("notification"); },
function() { test_permission("alarm"); },
function() { test_permission("telephony"); },
function() { test_permission("ringer"); },
function() { test_permission("publicnotification"); },
function() { test_preferences("content"); },
function() { test_preferences("notification"); },
function() { test_preferences("alarm"); },
function() { test_preferences("telephony"); },
function() { test_preferences("ringer"); },
function() { test_preferences("publicnotification"); },
test_wrong_preferences,
test_testAudioChannelInAudioNodeStream,
];
function runTest() {
if (!tests.length) {
SimpleTest.finish();
return;
}
var test = tests.shift();
test();
}
SpecialPowers.pushPrefEnv({"set": [["media.useAudioChannelAPI", true ]]}, runTest);
SimpleTest.waitForExplicitFinish();
SimpleTest.requestLongerTimeout(5);
</script>
</pre>
</body>
</html>


@@ -103,7 +103,7 @@ static char* GetVersion(void* verbuf)
HIWORD(fileInfo->dwFileVersionMS),
LOWORD(fileInfo->dwFileVersionMS),
HIWORD(fileInfo->dwFileVersionLS),
LOWORD(fileInfo->dwFileVersionLS));
LOWORD(fileInfo->dwFileVersionLS)).release();
}
return nullptr;


@@ -2028,7 +2028,7 @@ nsWebBrowserPersist::CalculateUniqueFilename(nsIURI *aURI)
if (base.IsEmpty() || duplicateCounter > 1)
{
char * tmp = mozilla::Smprintf("_%03d", duplicateCounter);
SmprintfPointer tmp = mozilla::Smprintf("_%03d", duplicateCounter);
NS_ENSURE_TRUE(tmp, NS_ERROR_OUT_OF_MEMORY);
if (filename.Length() < kDefaultMaxFilenameLength - 4)
{
@@ -2038,8 +2038,7 @@ nsWebBrowserPersist::CalculateUniqueFilename(nsIURI *aURI)
{
base.Mid(tmpBase, 0, base.Length() - 4);
}
tmpBase.Append(tmp);
mozilla::SmprintfFree(tmp);
tmpBase.Append(tmp.get());
}
else
{


@@ -33,23 +33,4 @@ interface AudioContext : BaseAudioContext {
[NewObject, Throws]
MediaStreamAudioDestinationNode createMediaStreamDestination();
};
// Mozilla extensions
partial interface AudioContext {
// Read AudioChannel.webidl for more information about this attribute.
[Pref="media.useAudioChannelAPI"]
readonly attribute AudioChannel mozAudioChannelType;
// These 2 events are dispatched when the AudioContext object is muted by
// the AudioChannelService. It's called 'interrupt' because when this event is
// dispatched on an HTMLMediaElement, the audio stream is paused.
[Pref="media.useAudioChannelAPI"]
attribute EventHandler onmozinterruptbegin;
[Pref="media.useAudioChannelAPI"]
attribute EventHandler onmozinterruptend;
// This method is for test only.
[ChromeOnly] AudioChannel testAudioChannelInAudioNodeStream();
};
};


@@ -138,21 +138,6 @@ partial interface HTMLMediaElement {
// the media element has a fragment URI for the currentSrc, otherwise
// it is equal to the media duration.
readonly attribute double mozFragmentEnd;
// Mozilla extension: an audio channel type for media elements.
// Read AudioChannel.webidl for more information about this attribute.
[SetterThrows, Pref="media.useAudioChannelAPI"]
attribute AudioChannel mozAudioChannelType;
// In addition, the media element has these new events:
// * onmozinterruptbegin - called when the media element is interrupted
// because of the audiochannel manager.
// * onmozinterruptend - called when the interruption is concluded
[Pref="media.useAudioChannelAPI"]
attribute EventHandler onmozinterruptbegin;
[Pref="media.useAudioChannelAPI"]
attribute EventHandler onmozinterruptend;
};
// Encrypted Media Extensions


@@ -26,10 +26,12 @@ interface Selection {
void removeRange(Range range);
[Throws]
void removeAllRanges();
//void empty();
[Throws, BinaryName="RemoveAllRanges"]
void empty();
[Throws, BinaryName="collapseJS"]
void collapse(Node? node, optional unsigned long offset = 0);
//void setPosition(Node? node, optional unsigned long offset = 0);
[Throws, BinaryName="collapseJS"]
void setPosition(Node? node, optional unsigned long offset = 0);
[Throws, BinaryName="collapseToStartJS"]
void collapseToStart();
[Throws, BinaryName="collapseToEndJS"]
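
The two declarations uncommented above expose Selection.empty() and Selection.setPosition() to content as aliases of removeAllRanges() and collapse(), as their BinaryName annotations indicate. A minimal sketch of page script exercising them (the queried node is illustrative):

  // setPosition(node, offset) behaves like collapse(node, offset);
  // empty() behaves like removeAllRanges().
  let selection = window.getSelection();
  let target = document.querySelector("p");
  selection.setPosition(target, 0);
  selection.empty();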


@@ -8,7 +8,7 @@
#include "prmem.h"
#include "base/string_util.h"
#include "nsXPCOM.h"
#include "mozilla/Printf.h"
#include "mozilla/Move.h"
namespace mozilla {
@@ -44,11 +44,9 @@ Logger::~Logger()
break;
}
MOZ_LOG(gChromiumPRLog, prlevel, ("%s:%i: %s", mFile, mLine, mMsg ? mMsg : "<no message>"));
MOZ_LOG(gChromiumPRLog, prlevel, ("%s:%i: %s", mFile, mLine, mMsg ? mMsg.get() : "<no message>"));
if (xpcomlevel != -1)
NS_DebugBreak(xpcomlevel, mMsg, NULL, mFile, mLine);
mozilla::SmprintfFree(mMsg);
NS_DebugBreak(xpcomlevel, mMsg.get(), NULL, mFile, mLine);
}
void
@@ -56,7 +54,7 @@ Logger::printf(const char* fmt, ...)
{
va_list args;
va_start(args, fmt);
mMsg = mozilla::VsmprintfAppend(mMsg, fmt, args);
mMsg = mozilla::VsmprintfAppend(mozilla::Move(mMsg), fmt, args);
va_end(args);
}


@@ -13,6 +13,7 @@
#include "base/basictypes.h"
#include "mozilla/Attributes.h"
#include "mozilla/Logging.h"
#include "mozilla/Printf.h"
#ifdef NO_CHROMIUM_LOGGING
#include <sstream>
@@ -39,7 +40,6 @@ public:
: mSeverity(severity)
, mFile(file)
, mLine(line)
, mMsg(NULL)
{ }
~Logger();
@@ -54,7 +54,7 @@ private:
LogSeverity mSeverity;
const char* mFile;
int mLine;
char* mMsg;
SmprintfPointer mMsg;
DISALLOW_EVIL_CONSTRUCTORS(Logger);
};


@@ -31,11 +31,10 @@
#include "mozilla/StaticMutex.h"
#ifdef DEBUG
#define LOG_ERROR(str, args...) \
PR_BEGIN_MACRO \
char *msg = mozilla::Smprintf(str, ## args); \
NS_WARNING(msg); \
mozilla::SmprintfFree(msg); \
#define LOG_ERROR(str, args...) \
PR_BEGIN_MACRO \
mozilla::SmprintfPointer msg = mozilla::Smprintf(str, ## args); \
NS_WARNING(msg.get()); \
PR_END_MACRO
#else
#define LOG_ERROR(str, args...) do { /* nothing */ } while(0)


@@ -917,8 +917,6 @@ description =
description =
[PContent::KeygenProvideContent]
description =
[PContent::AllocateTabId]
description =
[PContent::GetGraphicsDeviceInitData]
description =
[PContent::CreateWindow]


@@ -1308,7 +1308,7 @@ NewUCollator(JSContext* cx, Handle<CollatorObject*> collator)
memcpy(newLocale + index, insert, insertLen);
memcpy(newLocale + index + insertLen, oldLocale + index, localeLen - index + 1); // '\0'
locale.clear();
locale.initBytes(newLocale);
locale.initBytes(JS::UniqueChars(newLocale));
} else {
MOZ_ASSERT(StringEqualsAscii(usage, "sort"));
}


@@ -2841,14 +2841,13 @@ GetBacktrace(JSContext* cx, unsigned argc, Value* vp)
showThisProps = ToBoolean(v);
}
char* buf = JS::FormatStackDump(cx, nullptr, showArgs, showLocals, showThisProps);
JS::UniqueChars buf = JS::FormatStackDump(cx, nullptr, showArgs, showLocals, showThisProps);
if (!buf)
return false;
RootedString str(cx);
if (!(str = JS_NewStringCopyZ(cx, buf)))
if (!(str = JS_NewStringCopyZ(cx, buf.get())))
return false;
JS_smprintf_free(buf);
args.rval().setString(str);
return true;


@@ -212,6 +212,7 @@ function treatAsSafeArgument(entry, varName, csuName)
["Gecko_DestroyShapeSource", "aShape", null],
["Gecko_StyleShapeSource_SetURLValue", "aShape", null],
["Gecko_nsFont_InitSystem", "aDest", null],
["Gecko_StyleTransition_SetUnsupportedProperty", "aTransition", null],
];
for (var [entryMatch, varMatch, csuMatch] of whitelist) {
assert(entryMatch || varMatch || csuMatch);


@@ -2313,21 +2313,21 @@ LiveRange::toString() const
{
AutoEnterOOMUnsafeRegion oomUnsafe;
char* buf = JS_smprintf("v%u [%u,%u)", hasVreg() ? vreg() : 0, from().bits(), to().bits());
UniqueChars buf = JS_smprintf("v%u [%u,%u)", hasVreg() ? vreg() : 0, from().bits(), to().bits());
if (buf && bundle() && !bundle()->allocation().isBogus())
buf = JS_sprintf_append(buf, " %s", bundle()->allocation().toString().get());
buf = JS_sprintf_append(Move(buf), " %s", bundle()->allocation().toString().get());
if (buf && hasDefinition())
buf = JS_sprintf_append(buf, " (def)");
buf = JS_sprintf_append(Move(buf), " (def)");
for (UsePositionIterator iter = usesBegin(); buf && iter; iter++)
buf = JS_sprintf_append(buf, " %s@%u", iter->use()->toString().get(), iter->pos.bits());
buf = JS_sprintf_append(Move(buf), " %s@%u", iter->use()->toString().get(), iter->pos.bits());
if (!buf)
oomUnsafe.crash("LiveRange::toString()");
return UniqueChars(buf);
return buf;
}
UniqueChars
@@ -2336,10 +2336,10 @@ LiveBundle::toString() const
AutoEnterOOMUnsafeRegion oomUnsafe;
// Suppress -Wformat warning.
char *buf = JS_smprintf("%s", "");
UniqueChars buf = JS_smprintf("%s", "");
for (LiveRange::BundleLinkIterator iter = rangesBegin(); buf && iter; iter++) {
buf = JS_sprintf_append(buf, "%s %s",
buf = JS_sprintf_append(Move(buf), "%s %s",
(iter == rangesBegin()) ? "" : " ##",
LiveRange::get(*iter)->toString().get());
}
@@ -2347,7 +2347,7 @@ LiveBundle::toString() const
if (!buf)
oomUnsafe.crash("LiveBundle::toString()");
return UniqueChars(buf);
return buf;
}
#endif // JS_JITSPEW


@@ -3325,12 +3325,11 @@ jit::Invalidate(JSContext* cx, JSScript* script, bool resetUses, bool cancelOffT
filename = "<unknown>";
// Construct the descriptive string.
char* buf = JS_smprintf("Invalidate %s:%" PRIuSIZE, filename, script->lineno());
UniqueChars buf = JS_smprintf("Invalidate %s:%" PRIuSIZE, filename, script->lineno());
// Ignore the event on allocation failure.
if (buf) {
cx->runtime()->geckoProfiler().markEvent(buf);
JS_smprintf_free(buf);
cx->runtime()->geckoProfiler().markEvent(buf.get());
}
}


@@ -376,26 +376,26 @@ LDefinition::toString() const
{
AutoEnterOOMUnsafeRegion oomUnsafe;
char* buf;
UniqueChars buf;
if (isBogusTemp()) {
buf = JS_smprintf("bogus");
} else {
buf = JS_smprintf("v%u<%s>", virtualRegister(), typeName(type()));
if (buf) {
if (policy() == LDefinition::FIXED)
buf = JS_sprintf_append(buf, ":%s", output()->toString().get());
buf = JS_sprintf_append(Move(buf), ":%s", output()->toString().get());
else if (policy() == LDefinition::MUST_REUSE_INPUT)
buf = JS_sprintf_append(buf, ":tied(%u)", getReusedInput());
buf = JS_sprintf_append(Move(buf), ":tied(%u)", getReusedInput());
}
}
if (!buf)
oomUnsafe.crash("LDefinition::toString()");
return UniqueChars(buf);
return buf;
}
static char*
static UniqueChars
PrintUse(const LUse* use)
{
switch (use->policy()) {
@@ -420,7 +420,7 @@ LAllocation::toString() const
{
AutoEnterOOMUnsafeRegion oomUnsafe;
char* buf;
UniqueChars buf;
if (isBogus()) {
buf = JS_smprintf("bogus");
} else {
@@ -452,7 +452,7 @@ LAllocation::toString() const
if (!buf)
oomUnsafe.crash("LAllocation::toString()");
return UniqueChars(buf);
return buf;
}
void

Some files were not shown because too many files changed.