Merge mozilla-central to fx-team on a CLOSED TREE

This commit is contained in:
Carsten "Tomcat" Book 2014-02-05 14:54:11 +01:00
Родитель 3dd756636e b377b7d2b5
Коммит d0390ec226
366 изменённых файлов: 49019 добавлений и 26103 удалений

Просмотреть файл

@ -189,25 +189,6 @@ this.Utils = {
},
getState: function getState(aAccessibleOrEvent) {
function State(aBase, aExtended) {
this.base = aBase;
this.extended = aExtended;
this.contains = (other) => {
return !!(this.base & other.base || this.extended & other.extended);
};
this.toString = () => {
let stateStrings = Utils.AccRetrieval.
getStringStates(this.base, this.extended);
let statesArray = new Array(stateStrings.length);
for (let i = 0; i < statesArray.length; i++) {
statesArray[i] = stateStrings.item(i);
}
return '[' + statesArray.join(', ') + ']';
};
}
if (aAccessibleOrEvent instanceof Ci.nsIAccessibleStateChangeEvent) {
return new State(
aAccessibleOrEvent.isExtraState ? 0 : aAccessibleOrEvent.state,
@ -330,6 +311,31 @@ this.Utils = {
}
};
/**
* State object used internally to process accessible's states.
* @param {Number} aBase Base state.
* @param {Number} aExtended Extended state.
*/
function State(aBase, aExtended) {
this.base = aBase;
this.extended = aExtended;
}
State.prototype = {
contains: function State_contains(other) {
return !!(this.base & other.base || this.extended & other.extended);
},
toString: function State_toString() {
let stateStrings = Utils.AccRetrieval.
getStringStates(this.base, this.extended);
let statesArray = new Array(stateStrings.length);
for (let i = 0; i < statesArray.length; i++) {
statesArray[i] = stateStrings.item(i);
}
return '[' + statesArray.join(', ') + ']';
}
};
this.Logger = {
DEBUG: 0,
INFO: 1,

Просмотреть файл

@ -12,13 +12,13 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="ac94739a01d64e86890f54f1795ff8e82b8e7939"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="ce212ba54f36284db84068f82af0c790ceb2c3ff"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="platform_hardware_ril" path="hardware/ril" remote="b2g" revision="eda08beb3ba9a159843c70ffde0f9660ec351eb9"/>
<project name="platform_external_qemu" path="external/qemu" remote="b2g" revision="87aa8679560ce09f6445621d6f370d9de722cdba"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="e33ea242b4328fb0d1824c951f379332b5021512"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="fd67604d5932cce3617ff7ce725a0a686d129905"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="788d9ce293a9b44f64536130cf4ad577e8101dbe"/>
<!-- Stock Android things -->
<project name="platform/abi/cpp" path="abi/cpp" revision="dd924f92906085b831bf1cbbc7484d3c043d613c"/>
<project name="platform/bionic" path="bionic" revision="c72b8f6359de7ed17c11ddc9dfdde3f615d188a9"/>

Просмотреть файл

@ -11,10 +11,10 @@
</project>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="ac94739a01d64e86890f54f1795ff8e82b8e7939"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="ce212ba54f36284db84068f82af0c790ceb2c3ff"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="e33ea242b4328fb0d1824c951f379332b5021512"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="fd67604d5932cce3617ff7ce725a0a686d129905"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="788d9ce293a9b44f64536130cf4ad577e8101dbe"/>
<project name="valgrind" path="external/valgrind" remote="b2g" revision="905bfa3548eb75cf1792d0d8412b92113bbd4318"/>
<project name="vex" path="external/VEX" remote="b2g" revision="c3d7efc45414f1b44cd9c479bb2758c91c4707c0"/>
<!-- Stock Android things -->

Просмотреть файл

@ -12,13 +12,13 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="ac94739a01d64e86890f54f1795ff8e82b8e7939"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="ce212ba54f36284db84068f82af0c790ceb2c3ff"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="platform_hardware_ril" path="hardware/ril" remote="b2g" revision="eda08beb3ba9a159843c70ffde0f9660ec351eb9"/>
<project name="platform_external_qemu" path="external/qemu" remote="b2g" revision="87aa8679560ce09f6445621d6f370d9de722cdba"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="e33ea242b4328fb0d1824c951f379332b5021512"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="fd67604d5932cce3617ff7ce725a0a686d129905"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="788d9ce293a9b44f64536130cf4ad577e8101dbe"/>
<!-- Stock Android things -->
<project name="platform/abi/cpp" path="abi/cpp" revision="dd924f92906085b831bf1cbbc7484d3c043d613c"/>
<project name="platform/bionic" path="bionic" revision="c72b8f6359de7ed17c11ddc9dfdde3f615d188a9"/>

Просмотреть файл

@ -1,4 +1,4 @@
{
"revision": "a4d9c73e176e7dfc9a32d362f0f5e5cb5c21e323",
"revision": "cea79abbb7a97c0bd67051087bcdf40d25611930",
"repo_path": "/integration/gaia-central"
}

Просмотреть файл

@ -11,12 +11,12 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="ac94739a01d64e86890f54f1795ff8e82b8e7939"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="ce212ba54f36284db84068f82af0c790ceb2c3ff"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="84f2f2fce22605e17d511ff1767e54770067b5b5"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="e33ea242b4328fb0d1824c951f379332b5021512"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="fd67604d5932cce3617ff7ce725a0a686d129905"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="788d9ce293a9b44f64536130cf4ad577e8101dbe"/>
<!-- Stock Android things -->
<project name="platform/abi/cpp" path="abi/cpp" revision="6426040f1be4a844082c9769171ce7f5341a5528"/>
<project name="platform/bionic" path="bionic" revision="d2eb6c7b6e1bc7643c17df2d9d9bcb1704d0b9ab"/>

Просмотреть файл

@ -10,7 +10,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="ac94739a01d64e86890f54f1795ff8e82b8e7939"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="ce212ba54f36284db84068f82af0c790ceb2c3ff"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="84f2f2fce22605e17d511ff1767e54770067b5b5"/>

Просмотреть файл

@ -12,12 +12,12 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="ac94739a01d64e86890f54f1795ff8e82b8e7939"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="ce212ba54f36284db84068f82af0c790ceb2c3ff"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="84f2f2fce22605e17d511ff1767e54770067b5b5"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="e33ea242b4328fb0d1824c951f379332b5021512"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="fd67604d5932cce3617ff7ce725a0a686d129905"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="788d9ce293a9b44f64536130cf4ad577e8101dbe"/>
<!-- Stock Android things -->
<project name="platform/abi/cpp" path="abi/cpp" revision="6426040f1be4a844082c9769171ce7f5341a5528"/>
<project name="platform/bionic" path="bionic" revision="cd5dfce80bc3f0139a56b58aca633202ccaee7f8"/>

Просмотреть файл

@ -11,12 +11,12 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="ac94739a01d64e86890f54f1795ff8e82b8e7939"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="ce212ba54f36284db84068f82af0c790ceb2c3ff"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="84f2f2fce22605e17d511ff1767e54770067b5b5"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="e33ea242b4328fb0d1824c951f379332b5021512"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="fd67604d5932cce3617ff7ce725a0a686d129905"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="788d9ce293a9b44f64536130cf4ad577e8101dbe"/>
<project name="gonk-patches" path="patches" remote="b2g" revision="223a2421006e8f5da33f516f6891c87cae86b0f6"/>
<!-- Stock Android things -->
<project name="platform/abi/cpp" path="abi/cpp" revision="6426040f1be4a844082c9769171ce7f5341a5528"/>

Просмотреть файл

@ -11,10 +11,10 @@
</project>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="ac94739a01d64e86890f54f1795ff8e82b8e7939"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="ce212ba54f36284db84068f82af0c790ceb2c3ff"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="e33ea242b4328fb0d1824c951f379332b5021512"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="fd67604d5932cce3617ff7ce725a0a686d129905"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="788d9ce293a9b44f64536130cf4ad577e8101dbe"/>
<project name="valgrind" path="external/valgrind" remote="b2g" revision="905bfa3548eb75cf1792d0d8412b92113bbd4318"/>
<project name="vex" path="external/VEX" remote="b2g" revision="c3d7efc45414f1b44cd9c479bb2758c91c4707c0"/>
<!-- Stock Android things -->

Просмотреть файл

@ -11,12 +11,12 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="ac94739a01d64e86890f54f1795ff8e82b8e7939"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="ce212ba54f36284db84068f82af0c790ceb2c3ff"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="e9b6626eddbc85873eaa2a9174a9bd5101e5c05f"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="84f2f2fce22605e17d511ff1767e54770067b5b5"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="e33ea242b4328fb0d1824c951f379332b5021512"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="fd67604d5932cce3617ff7ce725a0a686d129905"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="788d9ce293a9b44f64536130cf4ad577e8101dbe"/>
<project name="gonk-patches" path="patches" remote="b2g" revision="223a2421006e8f5da33f516f6891c87cae86b0f6"/>
<!-- Stock Android things -->
<project name="platform/abi/cpp" path="abi/cpp" revision="6426040f1be4a844082c9769171ce7f5341a5528"/>

Просмотреть файл

@ -5943,6 +5943,10 @@ function WindowIsClosing()
if (!closeWindow(false, warnAboutClosingWindow))
return false;
// Bug 967873 - Proxy nsDocumentViewer::PermitUnload to the child process
if (gMultiProcessBrowser)
return true;
for (let browser of gBrowser.browsers) {
let ds = browser.docShell;
if (ds.contentViewer && !ds.contentViewer.permitUnload())

Просмотреть файл

@ -65,7 +65,7 @@
frame1.docShell.chromeEventHandler.removeAttribute("crashedPageTitle");
SimpleTest.is(frame1.contentDocument.documentURI,
"about:tabcrashed?e=tabcrashed&u=http%3A//www.example.com/1&c=UTF-8&d=pageTitle&f=regular",
"about:tabcrashed?e=tabcrashed&u=http%3A//www.example.com/1&c=UTF-8&f=regular&d=pageTitle",
"Correct about:tabcrashed displayed for page with title.");
errorPageReady = waitForErrorPage(frame2);
@ -74,7 +74,7 @@
yield errorPageReady;
SimpleTest.is(frame2.contentDocument.documentURI,
"about:tabcrashed?e=tabcrashed&u=http%3A//www.example.com/2&c=UTF-8&d=%20&f=regular",
"about:tabcrashed?e=tabcrashed&u=http%3A//www.example.com/2&c=UTF-8&f=regular&d=%20",
"Correct about:tabcrashed displayed for page with no title.");
SimpleTest.finish();

Просмотреть файл

@ -19,7 +19,7 @@ function test() {
ok(aDebuggee, "Should have a debuggee available.");
ok(aPanel, "Should have a debugger pane available.");
waitForSourceAndCaretAndScopes(aPanel, "-02.js", 6).then(() => {
waitForSourceAndCaretAndScopes(aPanel, "-02.js", 1).then(() => {
resumeDebuggerThenCloseAndFinish(aPanel);
});

Просмотреть файл

@ -38,7 +38,7 @@ function testFirstPage() {
// this function to return first.
executeSoon(() => gDebuggee.firstCall());
return waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6).then(() => {
return waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1).then(() => {
validateFirstPage();
});
}

Просмотреть файл

@ -19,7 +19,7 @@ function test() {
gSources = gDebugger.DebuggerView.Sources;
gFrames = gDebugger.DebuggerView.StackFrames;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6)
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1)
.then(checkNavigationWhileNotFocused)
.then(focusCurrentStackFrame)
.then(checkNavigationWhileFocused)
@ -32,13 +32,13 @@ function test() {
});
function checkNavigationWhileNotFocused() {
checkState({ frame: 3, source: 1, line: 6 });
checkState({ frame: 3, source: 1, line: 1 });
EventUtils.sendKey("DOWN", gDebugger);
checkState({ frame: 3, source: 1, line: 7 });
checkState({ frame: 3, source: 1, line: 2 });
EventUtils.sendKey("UP", gDebugger);
checkState({ frame: 3, source: 1, line: 6 });
checkState({ frame: 3, source: 1, line: 1 });
}
function focusCurrentStackFrame() {
@ -53,14 +53,14 @@ function test() {
waitForDebuggerEvents(gPanel, gDebugger.EVENTS.FETCHED_SCOPES),
EventUtils.sendKey("UP", gDebugger)
]);
checkState({ frame: 2, source: 1, line: 6 });
checkState({ frame: 2, source: 1, line: 1 });
yield promise.all([
waitForDebuggerEvents(gPanel, gDebugger.EVENTS.FETCHED_SCOPES),
waitForSourceAndCaret(gPanel, "-01.js", 5),
waitForSourceAndCaret(gPanel, "-01.js", 1),
EventUtils.sendKey("UP", gDebugger)
]);
checkState({ frame: 1, source: 0, line: 5 });
checkState({ frame: 1, source: 0, line: 1 });
yield promise.all([
waitForDebuggerEvents(gPanel, gDebugger.EVENTS.FETCHED_SCOPES),
@ -70,14 +70,14 @@ function test() {
yield promise.all([
waitForDebuggerEvents(gPanel, gDebugger.EVENTS.FETCHED_SCOPES),
waitForSourceAndCaret(gPanel, "-02.js", 6),
waitForSourceAndCaret(gPanel, "-02.js", 1),
EventUtils.sendKey("END", gDebugger)
]);
checkState({ frame: 3, source: 1, line: 6 });
checkState({ frame: 3, source: 1, line: 1 });
yield promise.all([
waitForDebuggerEvents(gPanel, gDebugger.EVENTS.FETCHED_SCOPES),
waitForSourceAndCaret(gPanel, "-01.js", 5),
waitForSourceAndCaret(gPanel, "-01.js", 1),
EventUtils.sendKey("HOME", gDebugger)
]);
checkState({ frame: 0, source: 0, line: 5 });

Просмотреть файл

@ -23,7 +23,7 @@ function test() {
gBreakpointsAdded = gBreakpoints._added;
gBreakpointsRemoving = gBreakpoints._removing;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6).then(performTest);
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1).then(performTest);
gDebuggee.firstCall();
});

Просмотреть файл

@ -73,7 +73,7 @@ function test() {
yield verifyView({ disabled: false, visible: true });
executeSoon(() => gDebugger.gThreadClient.resume());
yield waitForSourceAndCaretAndScopes(aPanel, "-02.js", 6);
yield waitForSourceAndCaretAndScopes(aPanel, "-02.js", 1);
yield verifyView({ disabled: false, visible: false });
});
}
@ -84,11 +84,11 @@ function test() {
yield verifyView({ disabled: false, visible: false });
executeSoon(() => aDebuggee.firstCall());
yield waitForSourceAndCaretAndScopes(aPanel, "-01.js", 5);
yield waitForSourceAndCaretAndScopes(aPanel, "-01.js", 1);
yield verifyView({ disabled: false, visible: true });
executeSoon(() => gDebugger.gThreadClient.resume());
yield waitForSourceAndCaretAndScopes(aPanel, "-02.js", 6);
yield waitForSourceAndCaretAndScopes(aPanel, "-02.js", 1);
yield verifyView({ disabled: false, visible: false });
});
}
@ -101,13 +101,13 @@ function test() {
executeSoon(() => aDebuggee.firstCall());
yield waitForDebuggerEvents(aPanel, gEvents.FETCHED_SCOPES);
yield ensureSourceIs(aPanel, "-02.js");
yield ensureCaretAt(aPanel, 6);
yield ensureCaretAt(aPanel, 1);
yield verifyView({ disabled: true, visible: false });
executeSoon(() => gDebugger.gThreadClient.resume());
yield waitForDebuggerEvents(aPanel, gEvents.AFTER_FRAMES_CLEARED);
yield ensureSourceIs(aPanel, "-02.js");
yield ensureCaretAt(aPanel, 6);
yield ensureCaretAt(aPanel, 1);
yield verifyView({ disabled: true, visible: false });
});
}

Просмотреть файл

@ -23,7 +23,7 @@ function test() {
gBreakpointsAdded = gBreakpoints._added;
gBreakpointsRemoving = gBreakpoints._removing;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6).then(performTest);
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1).then(performTest);
gDebuggee.firstCall();
});

Просмотреть файл

@ -23,7 +23,7 @@ function test() {
gBreakpointsAdded = gBreakpoints._added;
gBreakpointsRemoving = gBreakpoints._removing;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6).then(performTest);
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1).then(performTest);
gDebuggee.firstCall();
});

Просмотреть файл

@ -48,7 +48,7 @@ function test() {
// this function to return first.
executeSoon(() => aDebuggee.firstCall());
return waitForSourceAndCaretAndScopes(aPanel, ".html", 17).then(() => {
return waitForSourceAndCaretAndScopes(aPanel, ".html", 1).then(() => {
gPanel = aPanel;
gDebugger = gPanel.panelWin;
gThreadClient = gPanel.panelWin.gThreadClient;
@ -58,7 +58,7 @@ function test() {
},
post: function() {
ok(gThreadClient, "Debugger client exists.");
is(gLineNumber, 14, "gLineNumber is correct.");
is(gLineNumber, 1, "gLineNumber is correct.");
},
},
{
@ -74,7 +74,7 @@ function test() {
message: '',
args: {
file: { value: TAB_URL, message: '' },
line: { value: 14 }
line: { value: 1 }
}
},
exec: {
@ -109,8 +109,8 @@ function test() {
exec: {
output: [
/Source/, /Remove/,
/doc_cmd-break\.html:14/,
/doc_cmd-break\.html:17/
/doc_cmd-break\.html:1/,
/doc_cmd-break\.html:1/
]
}
},
@ -123,11 +123,11 @@ function test() {
}
},
{
setup: 'break del 14',
setup: 'break del 1',
check: {
input: 'break del 14',
hints: ' -> doc_cmd-break.html:14',
markup: 'VVVVVVVVVVII',
input: 'break del 1',
hints: ' -> doc_cmd-break.html:1',
markup: 'VVVVVVVVVVI',
status: 'ERROR',
args: {
breakpoint: {
@ -138,14 +138,14 @@ function test() {
}
},
{
setup: 'break del doc_cmd-break.html:14',
setup: 'break del doc_cmd-break.html:1',
check: {
input: 'break del doc_cmd-break.html:14',
input: 'break del doc_cmd-break.html:1',
hints: '',
markup: 'VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV',
markup: 'VVVVVVVVVVVVVVVVVVVVVVVVVVVVVV',
status: 'VALID',
args: {
breakpoint: { arg: ' doc_cmd-break.html:14' },
breakpoint: { arg: ' doc_cmd-break.html:1' },
}
},
exec: {

Просмотреть файл

@ -47,8 +47,8 @@ function test() {
// Allow this generator function to yield first.
executeSoon(() => debuggee.firstCall());
yield waitForSourceAndCaretAndScopes(panel, "-02.js", 6);
checkView(0, 1, 6, [/secondCall/, 118]);
yield waitForSourceAndCaretAndScopes(panel, "-02.js", 1);
checkView(0, 1, 1, [/secondCall/, 118]);
// Eval in the topmost frame, while paused.
let updatedView = waitForDebuggerEvents(panel, events.FETCHED_SCOPES);
@ -58,7 +58,7 @@ function test() {
is(result.return.class, "Function", "The evaluation return class is correct.");
yield updatedView;
checkView(0, 1, 6, [/secondCall/, 118]);
checkView(0, 1, 1, [/secondCall/, 118]);
ok(true, "Evaluating in the topmost frame works properly.");
// Eval in a different frame, while paused.
@ -72,7 +72,7 @@ function test() {
}
yield updatedView;
checkView(0, 1, 6, [/secondCall/, 118]);
checkView(0, 1, 1, [/secondCall/, 118]);
ok(true, "Evaluating in a custom frame works properly.");
// Eval in a non-existent frame, while paused.

Просмотреть файл

@ -39,8 +39,8 @@ function test() {
// Allow this generator function to yield first.
executeSoon(() => debuggee.firstCall());
yield waitForSourceAndCaretAndScopes(panel, "-02.js", 6);
checkView(0, 1, 6, [/secondCall/, 118]);
yield waitForSourceAndCaretAndScopes(panel, "-02.js", 1);
checkView(0, 1, 1, [/secondCall/, 118]);
// Change the selected frame and eval inside it.
let updatedFrame = waitForDebuggerEvents(panel, events.FETCHED_SCOPES);

Просмотреть файл

@ -20,7 +20,7 @@ function test() {
gSources = gDebugger.DebuggerView.Sources;
gContextMenu = gDebugger.document.getElementById("sourceEditorContextMenu");
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6).then(performTest).then(null, info);
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1).then(performTest).then(null, info);
gDebuggee.firstCall();
});

Просмотреть файл

@ -20,7 +20,7 @@ function test() {
gEditor = gDebugger.DebuggerView.editor;
gSources = gDebugger.DebuggerView.Sources;
waitForSourceAndCaretAndScopes(gPanel, "code_test-editor-mode", 5)
waitForSourceAndCaretAndScopes(gPanel, "code_test-editor-mode", 1)
.then(testInitialSource)
.then(testSwitch1)
.then(testSwitch2)

Просмотреть файл

@ -22,7 +22,7 @@ function test() {
ok(gDebugger.document.title.endsWith(EXAMPLE_URL + gLabel1),
"Title with first source is correct.");
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6)
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1)
.then(testSourcesDisplay)
.then(testSwitchPaused1)
.then(testSwitchPaused2)
@ -75,15 +75,15 @@ function testSourcesDisplay() {
ok(gDebugger.document.title.endsWith(EXAMPLE_URL + gLabel2),
"Title with second source is correct.");
ok(isCaretPos(gPanel, 6),
ok(isCaretPos(gPanel, 1),
"Editor caret location is correct.");
// The editor's debug location takes a tick to update.
executeSoon(() => {
is(gEditor.getDebugLocation(), 5,
is(gEditor.getDebugLocation(), 0,
"Editor debugger location is correct.");
ok(gEditor.hasLineClass(5, "debug-line"),
"The debugged line is highlighted appropriately.");
ok(gEditor.hasLineClass(0, "debug-line"),
"The debugged line is highlighted appropriately (1).");
waitForDebuggerEvents(gPanel, gDebugger.EVENTS.SOURCE_SHOWN).then(deferred.resolve);
gSources.selectedIndex = 0;
@ -136,12 +136,12 @@ function testSwitchPaused2() {
// The editor's debug location takes a tick to update.
executeSoon(() => {
ok(isCaretPos(gPanel, 6),
ok(isCaretPos(gPanel, 1),
"Editor caret location is correct.");
is(gEditor.getDebugLocation(), 5,
is(gEditor.getDebugLocation(), 0,
"Editor debugger location is correct.");
ok(gEditor.hasLineClass(5, "debug-line"),
"The debugged line is highlighted appropriately.");
ok(gEditor.hasLineClass(0, "debug-line"),
"The debugged line is highlighted appropriately (2).");
// Step out three times.
waitForThreadEvents(gPanel, "paused").then(() => {
@ -172,12 +172,12 @@ function testSwitchRunning() {
// The editor's debug location takes a tick to update.
executeSoon(() => {
ok(isCaretPos(gPanel, 5),
ok(isCaretPos(gPanel, 1),
"Editor caret location is correct.");
is(gEditor.getDebugLocation(), 4,
is(gEditor.getDebugLocation(), 0,
"Editor debugger location is correct.");
ok(gEditor.hasLineClass(4, "debug-line"),
"The debugged line is highlighted appropriately.");
ok(gEditor.hasLineClass(0, "debug-line"),
"The debugged line is highlighted appropriately (3).");
deferred.resolve();
});

Просмотреть файл

@ -19,7 +19,7 @@ function test() {
gEditor = gDebugger.DebuggerView.editor;
gSources = gDebugger.DebuggerView.Sources;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6)
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1)
.then(testSourcesDisplay)
.then(testSwitchPaused1)
.then(testSwitchPaused2)
@ -63,14 +63,14 @@ function testSourcesDisplay() {
is(gEditor.getText().search(/debugger/), 172,
"The second source is displayed.");
ok(isCaretPos(gPanel, 6),
ok(isCaretPos(gPanel, 1),
"Editor caret location is correct.");
// The editor's debug location takes a tick to update.
executeSoon(() => {
is(gEditor.getDebugLocation(), 5,
is(gEditor.getDebugLocation(), 0,
"Editor debugger location is correct.");
ok(gEditor.hasLineClass(5, "debug-line"),
ok(gEditor.hasLineClass(0, "debug-line"),
"The debugged line is highlighted appropriately.");
waitForDebuggerEvents(gPanel, gDebugger.EVENTS.SOURCE_SHOWN).then(deferred.resolve);
@ -125,11 +125,11 @@ function testSwitchPaused2() {
// The editor's debug location takes a tick to update.
executeSoon(() => {
ok(isCaretPos(gPanel, 6),
ok(isCaretPos(gPanel, 1),
"Editor caret location is correct.");
is(gEditor.getDebugLocation(), 5,
is(gEditor.getDebugLocation(), 0,
"Editor debugger location is correct.");
ok(gEditor.hasLineClass(5, "debug-line"),
ok(gEditor.hasLineClass(0, "debug-line"),
"The debugged line is highlighted appropriately.");
// Step out three times.
@ -161,11 +161,11 @@ function testSwitchRunning() {
// The editor's debug location takes a tick to update.
executeSoon(() => {
ok(isCaretPos(gPanel, 5),
ok(isCaretPos(gPanel, 1),
"Editor caret location is correct.");
is(gEditor.getDebugLocation(), 4,
is(gEditor.getDebugLocation(), 0,
"Editor debugger location is correct.");
ok(gEditor.hasLineClass(4, "debug-line"),
ok(gEditor.hasLineClass(0, "debug-line"),
"The debugged line is highlighted appropriately.");
deferred.resolve();

Просмотреть файл

@ -19,7 +19,7 @@ function test() {
gSources = gDebugger.DebuggerView.Sources;
gSearchBox = gDebugger.DebuggerView.Filtering._searchbox;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6)
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1)
.then(performSimpleSearch)
.then(() => verifySourceAndCaret("-01.js", 1, 1, [1, 1]))
.then(combineWithLineSearch)
@ -40,7 +40,7 @@ function test() {
function performSimpleSearch() {
let finished = promise.all([
ensureSourceIs(gPanel, "-02.js"),
ensureCaretAt(gPanel, 6),
ensureCaretAt(gPanel, 1),
waitForDebuggerEvents(gPanel, gDebugger.EVENTS.FILE_SEARCH_MATCH_FOUND),
waitForSourceShown(gPanel, "-01.js")
]);

Просмотреть файл

@ -20,7 +20,7 @@ function test() {
gSources = gDebugger.DebuggerView.Sources;
gSearchBox = gDebugger.DebuggerView.Filtering._searchbox;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6)
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1)
.then(performFileSearch)
.then(escapeAndHide)
.then(escapeAndClear)
@ -44,7 +44,7 @@ function test() {
function performFileSearch() {
let finished = promise.all([
ensureSourceIs(gPanel, "-02.js"),
ensureCaretAt(gPanel, 6),
ensureCaretAt(gPanel, 1),
once(gDebugger, "popupshown"),
waitForDebuggerEvents(gPanel, gDebugger.EVENTS.FILE_SEARCH_MATCH_FOUND),
waitForSourceShown(gPanel, "-01.js")

Просмотреть файл

@ -22,7 +22,7 @@ function test() {
gSearchView = gDebugger.DebuggerView.GlobalSearch;
gSearchBox = gDebugger.DebuggerView.Filtering._searchbox;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6)
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1)
.then(firstSearch)
.then(secondSearch)
.then(clearSearch)
@ -52,7 +52,7 @@ function firstSearch() {
info("Current source url:\n" + gSources.selectedValue);
info("Debugger editor text:\n" + gEditor.getText());
ok(isCaretPos(gPanel, 6),
ok(isCaretPos(gPanel, 1),
"The editor shouldn't have jumped to a matching line yet.");
ok(gSources.selectedValue.contains("-02.js"),
"The current source shouldn't have changed after a global search.");
@ -166,7 +166,7 @@ function secondSearch() {
info("Current source url:\n" + gSources.selectedValue);
info("Debugger editor text:\n" + gEditor.getText());
ok(isCaretPos(gPanel, 6),
ok(isCaretPos(gPanel, 1),
"The editor shouldn't have jumped to a matching line yet.");
ok(gSources.selectedValue.contains("-02.js"),
"The current source shouldn't have changed after a global search.");

Просмотреть файл

@ -22,7 +22,7 @@ function test() {
gSearchView = gDebugger.DebuggerView.GlobalSearch;
gSearchBox = gDebugger.DebuggerView.Filtering._searchbox;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6)
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1)
.then(firstSearch)
.then(doFirstJump)
.then(doSecondJump)
@ -55,7 +55,7 @@ function firstSearch() {
info("Current source url:\n" + gSources.selectedValue);
info("Debugger editor text:\n" + gEditor.getText());
ok(isCaretPos(gPanel, 6),
ok(isCaretPos(gPanel, 1),
"The editor shouldn't have jumped to a matching line yet.");
ok(gSources.selectedValue.contains("-02.js"),
"The current source shouldn't have changed after a global search.");
@ -74,7 +74,7 @@ function firstSearch() {
function doFirstJump() {
let deferred = promise.defer();
waitForSourceAndCaret(gPanel, "-01.js", 5).then(() => {
waitForSourceAndCaret(gPanel, "-01.js", 1).then(() => {
info("Current source url:\n" + gSources.selectedValue);
info("Debugger editor text:\n" + gEditor.getText());
@ -102,7 +102,7 @@ function doFirstJump() {
function doSecondJump() {
let deferred = promise.defer();
waitForSourceAndCaret(gPanel, "-02.js", 6).then(() => {
waitForSourceAndCaret(gPanel, "-02.js", 1).then(() => {
info("Current source url:\n" + gSources.selectedValue);
info("Debugger editor text:\n" + gEditor.getText());
@ -130,7 +130,7 @@ function doSecondJump() {
function doWrapAroundJump() {
let deferred = promise.defer();
waitForSourceAndCaret(gPanel, "-01.js", 5).then(() => {
waitForSourceAndCaret(gPanel, "-01.js", 1).then(() => {
info("Current source url:\n" + gSources.selectedValue);
info("Debugger editor text:\n" + gEditor.getText());
@ -158,7 +158,7 @@ function doWrapAroundJump() {
function doBackwardsWrapAroundJump() {
let deferred = promise.defer();
waitForSourceAndCaret(gPanel, "-02.js", 6).then(() => {
waitForSourceAndCaret(gPanel, "-02.js", 1).then(() => {
info("Current source url:\n" + gSources.selectedValue);
info("Debugger editor text:\n" + gEditor.getText());

Просмотреть файл

@ -22,7 +22,7 @@ function test() {
gSearchView = gDebugger.DebuggerView.GlobalSearch;
gSearchBox = gDebugger.DebuggerView.Filtering._searchbox;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6)
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1)
.then(firstSearch)
.then(performTest)
.then(() => closeDebuggerAndFinish(gPanel))
@ -51,7 +51,7 @@ function firstSearch() {
info("Current source url:\n" + gSources.selectedValue);
info("Debugger editor text:\n" + gEditor.getText());
ok(isCaretPos(gPanel, 6),
ok(isCaretPos(gPanel, 1),
"The editor shouldn't have jumped to a matching line yet.");
ok(gSources.selectedValue.contains("-02.js"),
"The current source shouldn't have changed after a global search.");

Просмотреть файл

@ -22,7 +22,7 @@ function test() {
gSearchView = gDebugger.DebuggerView.GlobalSearch;
gSearchBox = gDebugger.DebuggerView.Filtering._searchbox;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6)
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1)
.then(firstSearch)
.then(secondSearch)
.then(() => resumeDebuggerThenCloseAndFinish(gPanel))
@ -44,7 +44,7 @@ function firstSearch() {
info("Current source url:\n" + gSources.selectedValue);
info("Debugger editor text:\n" + gEditor.getText());
ok(isCaretPos(gPanel, 6),
ok(isCaretPos(gPanel, 1),
"The editor shouldn't have jumped to a matching line yet.");
ok(gSources.selectedValue.contains("-02.js"),
"The current source shouldn't have changed after a global search.");
@ -67,7 +67,7 @@ function secondSearch() {
info("Current source url:\n" + gSources.selectedValue);
info("Debugger editor text:\n" + gEditor.getText());
ok(isCaretPos(gPanel, 6),
ok(isCaretPos(gPanel, 1),
"The editor shouldn't have jumped to a matching line yet.");
ok(gSources.selectedValue.contains("-02.js"),
"The current source shouldn't have changed after a global search.");

Просмотреть файл

@ -23,7 +23,7 @@ function test() {
gSearchView = gDebugger.DebuggerView.GlobalSearch;
gSearchBox = gDebugger.DebuggerView.Filtering._searchbox;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6)
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1)
.then(doSearch)
.then(testExpandCollapse)
.then(testClickLineToJump)
@ -47,7 +47,7 @@ function doSearch() {
info("Current source url:\n" + gSources.selectedValue);
info("Debugger editor text:\n" + gEditor.getText());
ok(isCaretPos(gPanel, 6),
ok(isCaretPos(gPanel, 1),
"The editor shouldn't have jumped to a matching line yet.");
ok(gSources.selectedValue.contains("-02.js"),
"The current source shouldn't have changed after a global search.");
@ -94,13 +94,13 @@ function testClickLineToJump() {
let firstHeader = sourceResults[0].querySelector(".dbg-results-header");
let firstLine = sourceResults[0].querySelector(".dbg-results-line-contents");
waitForSourceAndCaret(gPanel, "-01.js", 1, 5).then(() => {
waitForSourceAndCaret(gPanel, "-01.js", 1, 1).then(() => {
info("Current source url:\n" + gSources.selectedValue);
info("Debugger editor text:\n" + gEditor.getText());
ok(isCaretPos(gPanel, 1, 5),
ok(isCaretPos(gPanel, 1, 1),
"The editor didn't jump to the correct line (1).");
is(gEditor.getSelection(), "A",
is(gEditor.getSelection(), "",
"The editor didn't select the correct text (1).");
ok(gSources.selectedValue.contains("-01.js"),
"The currently shown source is incorrect (1).");
@ -123,13 +123,13 @@ function testClickMatchToJump() {
let secondMatches = sourceResults[1].querySelectorAll(".dbg-results-line-contents-string[match=true]");
let lastMatch = Array.slice(secondMatches).pop();
waitForSourceAndCaret(gPanel, "-02.js", 6, 6).then(() => {
waitForSourceAndCaret(gPanel, "-02.js", 1, 1).then(() => {
info("Current source url:\n" + gSources.selectedValue);
info("Debugger editor text:\n" + gEditor.getText());
ok(isCaretPos(gPanel, 6, 6),
ok(isCaretPos(gPanel, 1, 1),
"The editor didn't jump to the correct line (2).");
is(gEditor.getSelection(), "a",
is(gEditor.getSelection(), "",
"The editor didn't select the correct text (2).");
ok(gSources.selectedValue.contains("-02.js"),
"The currently shown source is incorrect (2).");

Просмотреть файл

@ -22,7 +22,7 @@ function test() {
gSearchView = gDebugger.DebuggerView.GlobalSearch;
gSearchBox = gDebugger.DebuggerView.Filtering._searchbox;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6)
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1)
.then(doSearch)
.then(testFocusLost)
.then(doSearch)
@ -53,7 +53,7 @@ function doSearch() {
info("Current source url:\n" + gSources.selectedValue);
info("Debugger editor text:\n" + gEditor.getText());
ok(isCaretPos(gPanel, 6),
ok(isCaretPos(gPanel, 1),
"The editor shouldn't have jumped to a matching line yet.");
ok(gSources.selectedValue.contains("-02.js"),
"The current source shouldn't have changed after a global search.");

Просмотреть файл

@ -20,7 +20,7 @@ function test() {
gSearchBox = gDebugger.DebuggerView.Filtering._searchbox;
gSearchBoxPanel = gDebugger.DebuggerView.Filtering._searchboxHelpPanel;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6)
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1)
.then(showPopup)
.then(hidePopup)
.then(() => resumeDebuggerThenCloseAndFinish(gPanel))

Просмотреть файл

@ -25,7 +25,7 @@ function test() {
ok(false, "Damn it, this shouldn't have happened.");
});
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6)
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1)
.then(tryShowPopup)
.then(focusEditor)
.then(testFocusLost)
@ -64,9 +64,9 @@ function focusEditor() {
}
function testFocusLost() {
ok(isCaretPos(gPanel, 6, 1),
ok(isCaretPos(gPanel, 1, 1),
"The editor caret position appears to be correct after gaining focus.");
ok(isEditorSel(gPanel, [165, 165]),
ok(isEditorSel(gPanel, [1, 1]),
"The editor selection appears to be correct after gaining focus.");
is(gEditor.getSelection(), "",
"The editor selected text appears to be correct after gaining focus.");

Просмотреть файл

@ -19,7 +19,7 @@ function test() {
gFrames = gDebugger.DebuggerView.StackFrames;
gClassicFrames = gDebugger.DebuggerView.StackFramesClassicList;
waitForSourceAndCaretAndScopes(gPanel, ".html", 18).then(performTest);
waitForSourceAndCaretAndScopes(gPanel, ".html", 1).then(performTest);
gDebuggee.evalCall();
});
}

Просмотреть файл

@ -19,7 +19,7 @@ function test() {
gFrames = gDebugger.DebuggerView.StackFrames;
gClassicFrames = gDebugger.DebuggerView.StackFramesClassicList;
waitForSourceAndCaretAndScopes(gPanel, ".html", 18).then(performTest);
waitForSourceAndCaretAndScopes(gPanel, ".html", 1).then(performTest);
gDebuggee.evalCall();
});
}

Просмотреть файл

@ -22,7 +22,7 @@ function test() {
gFrames = gDebugger.DebuggerView.StackFrames;
gClassicFrames = gDebugger.DebuggerView.StackFramesClassicList;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6)
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1)
.then(initialChecks)
.then(testNewestTwoFrames)
.then(testOldestTwoFrames)
@ -54,12 +54,12 @@ function testNewestTwoFrames() {
"Newest frame should be selected in the mirrored view as well.");
is(gSources.selectedIndex, 1,
"The second source is selected in the widget.");
ok(isCaretPos(gPanel, 6),
"Editor caret location is correct.");
ok(isCaretPos(gPanel, 1),
"Editor caret location is correct (1).");
// The editor's debug location takes a tick to update.
executeSoon(() => {
is(gEditor.getDebugLocation(), 5,
is(gEditor.getDebugLocation(), 0,
"Editor debug location is correct.");
EventUtils.sendMouseEvent({ type: "mousedown" },
@ -73,7 +73,7 @@ function testNewestTwoFrames() {
is(gSources.selectedIndex, 1,
"The second source is still selected in the widget.");
ok(isCaretPos(gPanel, 6),
"Editor caret location is correct.");
"Editor caret location is correct (2).");
// The editor's debug location takes a tick to update.
executeSoon(() => {
@ -90,19 +90,19 @@ function testNewestTwoFrames() {
function testOldestTwoFrames() {
let deferred = promise.defer();
waitForSourceAndCaret(gPanel, "-01.js", 5).then(waitForTick).then(() => {
waitForSourceAndCaret(gPanel, "-01.js", 1).then(waitForTick).then(() => {
is(gFrames.selectedIndex, 1,
"Second frame should be selected after click.");
is(gClassicFrames.selectedIndex, 2,
"Second frame should be selected in the mirrored view as well.");
is(gSources.selectedIndex, 0,
"The first source is now selected in the widget.");
ok(isCaretPos(gPanel, 5),
"Editor caret location is correct.");
ok(isCaretPos(gPanel, 1),
"Editor caret location is correct (3).");
// The editor's debug location takes a tick to update.
executeSoon(() => {
is(gEditor.getDebugLocation(), 4,
is(gEditor.getDebugLocation(), 0,
"Editor debug location is correct.");
EventUtils.sendMouseEvent({ type: "mousedown" },
@ -116,7 +116,7 @@ function testOldestTwoFrames() {
is(gSources.selectedIndex, 0,
"The first source is still selected in the widget.");
ok(isCaretPos(gPanel, 5),
"Editor caret location is correct.");
"Editor caret location is correct (4).");
// The editor's debug location takes a tick to update.
executeSoon(() => {

Просмотреть файл

@ -22,7 +22,7 @@ function test() {
gFrames = gDebugger.DebuggerView.StackFrames;
gClassicFrames = gDebugger.DebuggerView.StackFramesClassicList;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6).then(performTest);
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1).then(performTest);
gDebuggee.firstCall();
});
}
@ -39,7 +39,7 @@ function performTest() {
is(gEditor.getText().search(/debugger/), 172,
"The second source is displayed.");
waitForSourceAndCaret(gPanel, "-01.js", 6).then(waitForTick).then(() => {
waitForSourceAndCaret(gPanel, "-01.js", 1).then(waitForTick).then(() => {
is(gFrames.selectedIndex, 0,
"Oldest frame should be selected after click.");
is(gClassicFrames.selectedIndex, 3,
@ -51,7 +51,7 @@ function performTest() {
is(gEditor.getText().search(/debugger/), -1,
"The second source is not displayed.");
waitForSourceAndCaret(gPanel, "-02.js", 6).then(waitForTick).then(() => {
waitForSourceAndCaret(gPanel, "-02.js", 1).then(waitForTick).then(() => {
is(gFrames.selectedIndex, 3,
"Newest frame should be selected again after click.");
is(gClassicFrames.selectedIndex, 0,

Просмотреть файл

@ -24,7 +24,7 @@ function test() {
gClassicFrames = gDebugger.DebuggerView.StackFramesClassicList;
gToolbar = gDebugger.DebuggerView.Toolbar;
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 6).then(performTest);
waitForSourceAndCaretAndScopes(gPanel, "-02.js", 1).then(performTest);
gDebuggee.firstCall();
});
}
@ -32,7 +32,7 @@ function test() {
function performTest() {
return Task.spawn(function() {
yield selectBottomFrame();
testBottomFrame(5);
testBottomFrame(0);
yield performStep("StepOver");
testTopFrame(3);

Просмотреть файл

@ -2575,13 +2575,24 @@ toolbarbutton.chevron > .toolbarbutton-menu-dropmarker {
url(chrome://browser/skin/tabbrowser/tab-background-start@2x.png);
}
.tabbrowser-tab:hover > .tab-stack > .tab-background:not([selected=true]),
.tabs-newtab-button:hover {
background-image: url(chrome://browser/skin/tabbrowser/tab-background-start@2x.png),
url(chrome://browser/skin/tabbrowser/tab-background-middle@2x.png),
url(chrome://browser/skin/tabbrowser/tab-background-end@2x.png);
}
.tabbrowser-tab:hover > .tab-stack > .tab-background > .tab-background-middle:not([selected=true]) {
background-image: url(chrome://browser/skin/tabbrowser/tab-background-middle@2x.png);
}
.tabbrowser-tab:hover > .tab-stack > .tab-background > .tab-background-start:not([selected=true]) {
background-image: url(chrome://browser/skin/tabbrowser/tab-background-start@2x.png);
}
.tabbrowser-tab:hover > .tab-stack > .tab-background > .tab-background-end:not([selected=true]) {
background-image: url(chrome://browser/skin/tabbrowser/tab-background-end@2x.png);
}
.tab-background-middle[selected=true] {
background-image: url(chrome://browser/skin/tabbrowser/tab-active-middle@2x.png),
@fgTabTexture@,

Просмотреть файл

@ -223,7 +223,6 @@
/* End selected tab */
/* new tab button border and gradient on hover */
.tabbrowser-tab:hover > .tab-stack > .tab-background:not([selected=true]),
.tabs-newtab-button:hover {
background-image: url(chrome://browser/skin/tabbrowser/tab-background-start.png),
url(chrome://browser/skin/tabbrowser/tab-background-middle.png),
@ -233,6 +232,27 @@
background-size: @tabCurveWidth@ 100%, calc(100% - (2 * @tabCurveWidth@)) 100%, @tabCurveWidth@ 100%;
}
/* normal tab border and gradient on hover */
.tabbrowser-tab:hover > .tab-stack > .tab-background > .tab-background-middle:not([selected=true]) {
background-image: url(chrome://browser/skin/tabbrowser/tab-background-middle.png);
background-repeat: repeat-x;
background-size: auto 100%;
}
.tabbrowser-tab:hover > .tab-stack > .tab-background > .tab-background-start:not([selected=true]),
.tabbrowser-tab:hover > .tab-stack > .tab-background > .tab-background-end:not([selected=true]) {
background-repeat: no-repeat;
background-size: 100% 100%;
}
.tabbrowser-tab:hover > .tab-stack > .tab-background > .tab-background-start:not([selected=true]) {
background-image: url(chrome://browser/skin/tabbrowser/tab-background-start.png);
}
.tabbrowser-tab:hover > .tab-stack > .tab-background > .tab-background-end:not([selected=true]) {
background-image: url(chrome://browser/skin/tabbrowser/tab-background-end.png);
}
/* Tab pointer-events */
.tabbrowser-tab {
pointer-events: none;

Просмотреть файл

@ -22,11 +22,16 @@ gyp_vars = {
'use_system_libvpx': 0,
'build_libjpeg': 0,
'build_libvpx': 0,
'build_libyuv': 0,
'libyuv_dir': '/media/libyuv',
'yuv_disable_avx2': 0 if CONFIG['HAVE_X86_AVX2'] else 1,
# saves 4MB when webrtc_trace is off
'enable_lazy_trace_alloc': 1,
# turn off mandatory use of NEON and instead use NEON detection
'arm_neon': 0,
'arm_neon_optional': 1,
'moz_widget_toolkit_gonk': 0,
@ -89,12 +94,15 @@ gyp_vars['target_arch'] = arches[CONFIG['CPU_ARCH']]
if CONFIG['ARM_ARCH']:
if int(CONFIG['ARM_ARCH']) < 7:
gyp_vars['armv7'] = 0
gyp_vars['arm_neon_optional'] = 0
elif os == 'Android':
gyp_vars['armv7'] = 1
else:
# CPU detection for ARM works on Android only. armv7 always uses CPU
# detection, so we have to set armv7=0 for non-Android target
gyp_vars['armv7'] = 0
# For libyuv
gyp_vars['arm_version'] = CONFIG['ARM_ARCH']
# Don't try to compile ssse3/sse4.1 code if toolchain doesn't support
if CONFIG['INTEL_ARCHITECTURE']:

Просмотреть файл

@ -73,22 +73,27 @@ class RemoteAutomation(Automation):
return env
def waitForFinish(self, proc, utilityPath, timeout, maxTime, startTime, debuggerInfo, symbolsPath):
""" Wait for tests to finish (as evidenced by the process exiting),
or for maxTime elapse, in which case kill the process regardless.
""" Wait for tests to finish.
If maxTime seconds elapse or no output is detected for timeout
seconds, kill the process and fail the test.
"""
# maxTime is used to override the default timeout, we should honor that
status = proc.wait(timeout = maxTime)
status = proc.wait(timeout = maxTime, noOutputTimeout = timeout)
self.lastTestSeen = proc.getLastTestSeen
if (status == 1 and self._devicemanager.getTopActivity() == proc.procName):
# Then we timed out, make sure Fennec is dead
topActivity = self._devicemanager.getTopActivity()
if topActivity == proc.procName:
proc.kill()
if status == 1:
if maxTime:
print "TEST-UNEXPECTED-FAIL | %s | application ran for longer than " \
"allowed maximum time of %s seconds" % (self.lastTestSeen, maxTime)
else:
print "TEST-UNEXPECTED-FAIL | %s | application ran for longer than " \
"allowed maximum time" % (self.lastTestSeen)
proc.kill()
if status == 2:
print "TEST-UNEXPECTED-FAIL | %s | application timed out after %d seconds with no output" \
% (self.lastTestSeen, int(timeout))
return status
@ -176,48 +181,28 @@ class RemoteAutomation(Automation):
def Process(self, cmd, stdout = None, stderr = None, env = None, cwd = None):
if stdout == None or stdout == -1 or stdout == subprocess.PIPE:
stdout = self._remoteLog
stdout = self._remoteLog
return self.RProcess(self._devicemanager, cmd, stdout, stderr, env, cwd)
return self.RProcess(self._devicemanager, cmd, stdout, stderr, env, cwd, self._appName)
# be careful here as this inner class doesn't have access to outer class members
class RProcess(object):
# device manager process
dm = None
def __init__(self, dm, cmd, stdout = None, stderr = None, env = None, cwd = None):
def __init__(self, dm, cmd, stdout = None, stderr = None, env = None, cwd = None, app = None):
self.dm = dm
self.stdoutlen = 0
self.lastTestSeen = "remoteautomation.py"
self.proc = dm.launchProcess(cmd, stdout, cwd, env, True)
if (self.proc is None):
if cmd[0] == 'am':
self.proc = stdout
else:
raise Exception("unable to launch process")
exepath = cmd[0]
name = exepath.split('/')[-1]
self.procName = name
# Hack for Robocop: Derive the actual process name from the command line.
# We expect something like:
# ['am', 'instrument', '-w', '-e', 'class', 'org.mozilla.fennec.tests.testBookmark', 'org.mozilla.roboexample.test/android.test.InstrumentationTestRunner']
# and want to derive 'org.mozilla.fennec'.
if cmd[0] == 'am':
self.proc = stdout
else:
raise Exception("unable to launch process")
self.procName = cmd[0].split('/')[-1]
if cmd[0] == 'am' and cmd[1] == "instrument":
try:
i = cmd.index("class")
except ValueError:
# no "class" argument -- maybe this isn't robocop?
i = -1
if (i > 0):
classname = cmd[i+1]
parts = classname.split('.')
try:
i = parts.index("tests")
except ValueError:
# no "tests" component -- maybe this isn't robocop?
i = -1
if (i > 0):
self.procName = '.'.join(parts[0:i])
print "Robocop derived process name: "+self.procName
self.procName = app
print "Robocop process name: "+self.procName
# Setting timeout at 1 hour since on a remote device this takes much longer
self.timeout = 3600
@ -263,31 +248,43 @@ class RemoteAutomation(Automation):
def getLastTestSeen(self):
return self.lastTestSeen
def wait(self, timeout = None):
# Wait for the remote process to end (or for its activity to go to background).
# While waiting, periodically retrieve the process output and print it.
# If the process is still running after *timeout* seconds, return 1;
# If the process is still running but no output is received in *noOutputTimeout*
# seconds, return 2;
# Else, once the process exits/goes to background, return 0.
def wait(self, timeout = None, noOutputTimeout = None):
timer = 0
noOutputTimer = 0
interval = 20
if timeout == None:
timeout = self.timeout
status = 0
while (self.dm.getTopActivity() == self.procName):
# retrieve log updates every 60 seconds
if timer % 60 == 0:
t = self.stdout
if t != '':
print t
noOutputTimer = 0
time.sleep(interval)
timer += interval
noOutputTimer += interval
if (timer > timeout):
status = 1
break
if (noOutputTimeout and noOutputTimer > noOutputTimeout):
status = 2
break
# Flush anything added to stdout during the sleep
print self.stdout
if (timer >= timeout):
return 1
return 0
return status
def kill(self):
self.dm.killProcess(self.procName)

Просмотреть файл

@ -1351,6 +1351,20 @@ if test "$GNU_CC"; then
AC_MSG_RESULT([no]))
CFLAGS=$_SAVE_CFLAGS
case "${CPU_ARCH}" in
x86 | x86_64)
AC_MSG_CHECKING(for x86 AVX2 asm support in compiler)
AC_TRY_COMPILE([],
[asm volatile ("vpermq \$0xd8,%ymm0,%ymm0 \n");],
result="yes", result="no")
AC_MSG_RESULT("$result")
if test "$result" = "yes"; then
HAVE_X86_AVX2=1
AC_DEFINE(HAVE_X86_AVX2)
AC_SUBST(HAVE_X86_AVX2)
fi
esac
# Turn on GNU-specific warnings:
# -Wall - turn on a lot of warnings
# -Wpointer-arith - good to have

Просмотреть файл

@ -3359,7 +3359,8 @@ void HTMLMediaElement::NotifyDecoderPrincipalChanged()
bool subsumes;
mDecoder->UpdateSameOriginStatus(
NS_SUCCEEDED(NodePrincipal()->Subsumes(principal, &subsumes)) && subsumes);
!principal ||
(NS_SUCCEEDED(NodePrincipal()->Subsumes(principal, &subsumes)) && subsumes));
for (uint32_t i = 0; i < mOutputStreams.Length(); ++i) {
OutputMediaStream* ms = &mOutputStreams[i];

Просмотреть файл

@ -0,0 +1,72 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "AudioCompactor.h"
#if defined(MOZ_MEMORY)
# include "mozmemory.h"
#endif
namespace mozilla {
// Round a requested allocation size up to the size the allocator will
// actually hand back.  With jemalloc (MOZ_MEMORY) we can ask it directly;
// otherwise we have no visibility into the allocator and return the
// request unchanged.
static size_t
MallocGoodSize(size_t aBytes)
{
#if defined(MOZ_MEMORY)
  return malloc_good_size(aBytes);
#else
  return aBytes;
#endif
}
// Returns true when allocating aAllocSize bytes for a payload of aSize
// bytes would waste more than aMaxSlop bytes.
//
// Declared bool: the original declared this size_t while returning the
// result of a comparison, which obscured intent.  The only caller uses it
// in a boolean context, so this is a compatible, clearer signature.
static bool
TooMuchSlop(size_t aSize, size_t aAllocSize, size_t aMaxSlop)
{
  // If the allocated size is less than our target size, then we
  // are chunking.  This means it will be completely filled with
  // zero slop.
  size_t slop = (aAllocSize > aSize) ? (aAllocSize - aSize) : 0;
  return slop > aMaxSlop;
}
// Pick a per-chunk sample count for aFrames frames of aChannels channels:
// start from the allocator's rounded-up size for the full request and
// halve it until either the waste (slop) drops to aMaxSlop or below, or
// the chunk would become unreasonably small (<= 64 bytes).
uint32_t
AudioCompactor::GetChunkSamples(uint32_t aFrames, uint32_t aChannels,
                                size_t aMaxSlop)
{
  const size_t targetBytes = AudioDataSize(aFrames, aChannels);

  size_t allocBytes = MallocGoodSize(targetBytes);
  for (; allocBytes > 64 && TooMuchSlop(targetBytes, allocBytes, aMaxSlop);
       allocBytes = MallocGoodSize(allocBytes / 2)) {
  }

  // Convert the byte budget into a sample count based on the expected
  // malloc size so callers can pack as many whole frames as possible.
  return allocBytes / sizeof(AudioDataValue);
}
// Copy as many whole frames as fit into aBuffer (aSamples elements long)
// from the source byte stream, advancing the internal read cursor.
// Returns the number of frames copied; a trailing partial frame is left
// for a later call.
uint32_t
AudioCompactor::NativeCopy::operator()(AudioDataValue *aBuffer, size_t aSamples)
{
  NS_ASSERTION(aBuffer, "cannot copy to null buffer pointer");
  NS_ASSERTION(aSamples, "cannot copy zero values");

  const size_t destCapacity = aSamples * sizeof(AudioDataValue);
  const size_t remaining = mSourceBytes - mNextByte;
  const size_t copyLimit = std::min(destCapacity, remaining);

  // Round down to whole frames; partial frames are never copied.
  uint32_t wholeFrames = copyLimit / BytesPerFrame(mChannels);
  size_t copyBytes = wholeFrames * BytesPerFrame(mChannels);

  NS_ASSERTION((mNextByte + copyBytes) <= mSourceBytes,
               "tried to copy beyond source buffer");
  NS_ASSERTION(copyBytes <= destCapacity, "tried to copy beyond destination buffer");

  memcpy(aBuffer, mSource + mNextByte, copyBytes);
  mNextByte += copyBytes;

  return wholeFrames;
}
} // namespace mozilla

Просмотреть файл

@ -0,0 +1,121 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(AudioCompactor_h)
#define AudioCompactor_h
#include "MediaQueue.h"
#include "MediaData.h"
#include "VideoUtils.h"
namespace mozilla {
// Adapter around a MediaQueue<AudioData> that chunks pushed audio into
// allocation-friendly buffer sizes to minimize heap slop (see
// GetChunkSamples).
class AudioCompactor
{
public:
  // Borrows aQueue; the queue must outlive this object (mQueue is a
  // reference member).
  AudioCompactor(MediaQueue<AudioData>& aQueue)
    : mQueue(aQueue)
  { }

  // Push audio data into the underlying queue with minimal heap allocation
  // slop.  This method is responsible for allocating AudioDataValue[] buffers.
  // The caller must provide a functor to copy the data into the buffers.  The
  // functor must provide the following signature:
  //
  //   uint32_t operator()(AudioDataValue *aBuffer, size_t aSamples);
  //
  // The functor must copy as many complete frames as possible to the provided
  // buffer given its length (in AudioDataValue elements).  The number of frames
  // copied must be returned.  This copy functor must support being called
  // multiple times in order to copy the audio data fully.  The copy functor
  // must copy full frames as partial frames will be ignored.
  //
  // Returns false if the usec duration of any chunk overflows (CheckedInt64
  // invalid); true once all frames have been pushed.
  // NOTE(review): if the functor ever returns 0 frames while aFrames > 0
  // this loop will not terminate — the functor contract above must hold.
  template<typename CopyFunc>
  bool Push(int64_t aOffset, int64_t aTime, int32_t aSampleRate,
            uint32_t aFrames, uint32_t aChannels, CopyFunc aCopyFunc)
  {
    // If we are losing more than a reasonable amount to padding, try to chunk
    // the data.
    size_t maxSlop = AudioDataSize(aFrames, aChannels) / MAX_SLOP_DIVISOR;

    while (aFrames > 0) {
      uint32_t samples = GetChunkSamples(aFrames, aChannels, maxSlop);
      nsAutoArrayPtr<AudioDataValue> buffer(new AudioDataValue[samples]);

      // Copy audio data to buffer using caller-provided functor.
      uint32_t framesCopied = aCopyFunc(buffer, samples);

      NS_ASSERTION(framesCopied <= aFrames, "functor copied too many frames");

      CheckedInt64 duration = FramesToUsecs(framesCopied, aSampleRate);
      if (!duration.isValid()) {
        // Overflow computing the chunk duration; buffer is freed by
        // nsAutoArrayPtr on return.
        return false;
      }

      // AudioData takes ownership of the buffer (buffer.forget()).
      mQueue.Push(new AudioData(aOffset,
                                aTime,
                                duration.value(),
                                framesCopied,
                                buffer.forget(),
                                aChannels));

      // Remove the frames we just pushed into the queue and loop if there is
      // more to be done.
      aTime += duration.value();
      aFrames -= framesCopied;

      // NOTE: No need to update aOffset as its only an approximation anyway.
    }

    return true;
  }

  // Copy functor suitable for copying audio samples already in the
  // AudioDataValue format/layout expected by AudioStream on this platform.
  class NativeCopy
  {
  public:
    // Borrows aSource (not owned); it must stay alive for the lifetime of
    // this functor.
    NativeCopy(const uint8_t* aSource, size_t aSourceBytes,
               uint32_t aChannels)
      : mSource(aSource)
      , mSourceBytes(aSourceBytes)
      , mChannels(aChannels)
      , mNextByte(0)
    { }

    // Copies as many whole frames as fit in aBuffer (aSamples elements
    // long) and returns the number of frames copied.
    uint32_t operator()(AudioDataValue *aBuffer, size_t aSamples);

  private:
    const uint8_t* const mSource;
    const size_t mSourceBytes;
    const uint32_t mChannels;
    // Read cursor into mSource, advanced on each call to operator().
    size_t mNextByte;
  };

  // Allow 12.5% slop before chunking kicks in.  Public so that the gtest can
  // access it.
  static const size_t MAX_SLOP_DIVISOR = 8;

private:
  // Compute the number of AudioDataValue samples that will fit the most
  // frames while keeping heap allocation slop less than the given threshold.
  static uint32_t
  GetChunkSamples(uint32_t aFrames, uint32_t aChannels, size_t aMaxSlop);

  // Size in bytes of one interleaved frame of aChannels samples.
  static size_t BytesPerFrame(uint32_t aChannels)
  {
    return sizeof(AudioDataValue) * aChannels;
  }

  // Total byte size of aFrames interleaved frames of aChannels samples.
  static size_t AudioDataSize(uint32_t aFrames, uint32_t aChannels)
  {
    return aFrames * BytesPerFrame(aChannels);
  }

  // Destination queue (borrowed reference, see constructor).
  MediaQueue<AudioData> &mQueue;
};
} // namespace mozilla
#endif // AudioCompactor_h

Просмотреть файл

@ -45,7 +45,8 @@ void* MediaDecoderReader::VideoQueueMemoryFunctor::operator()(void* anObject) {
}
MediaDecoderReader::MediaDecoderReader(AbstractMediaDecoder* aDecoder)
: mDecoder(aDecoder),
: mAudioCompactor(mAudioQueue),
mDecoder(aDecoder),
mIgnoreAudioOutputFormat(false)
{
MOZ_COUNT_CTOR(MediaDecoderReader);
@ -280,4 +281,3 @@ MediaDecoderReader::GetBuffered(mozilla::dom::TimeRanges* aBuffered,
}
} // namespace mozilla

Просмотреть файл

@ -10,6 +10,7 @@
#include "MediaInfo.h"
#include "MediaData.h"
#include "MediaQueue.h"
#include "AudioCompactor.h"
namespace mozilla {
@ -105,6 +106,12 @@ protected:
// the decoder, state machine, and main threads.
MediaQueue<VideoData> mVideoQueue;
// An adapter to the audio queue which first copies data to buffers with
// minimal allocation slop and then pushes them to the queue. This is
// useful for decoders working with formats that give awkward numbers of
// frames such as mp3.
AudioCompactor mAudioCompactor;
public:
// Populates aBuffered with the time ranges which are buffered. aStartTime
// must be the presentation time of the first frame in the media, e.g.

Просмотреть файл

@ -45,6 +45,13 @@ extern PRLogModuleInfo* gMediaDecoderLog;
#define DECODER_LOG(type, msg)
#endif
// GetCurrentTime is defined in winbase.h as zero argument macro forwarding to
// GetTickCount() and conflicts with MediaDecoderStateMachine::GetCurrentTime
// implementation. With unified builds, putting this in headers is not enough.
#ifdef GetCurrentTime
#undef GetCurrentTime
#endif
// Wait this number of seconds when buffering, then leave and play
// as best as we can if the required amount of data hasn't been
// retrieved.

Просмотреть файл

@ -92,6 +92,13 @@ namespace mozilla {
class AudioSegment;
class VideoSegment;
// GetCurrentTime is defined in winbase.h as zero argument macro forwarding to
// GetTickCount() and conflicts with MediaDecoderStateMachine::GetCurrentTime
// implementation.
#ifdef GetCurrentTime
#undef GetCurrentTime
#endif
/*
The state machine class. This manages the decoding and seeking in the
MediaDecoderReader on the decode thread, and A/V sync on the shared

Просмотреть файл

@ -0,0 +1,144 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "MediaTaskQueue.h"
#include "nsThreadUtils.h"
#include "SharedThreadPool.h"
namespace mozilla {
// Construct a task queue backed by the given shared thread pool.  mPool is
// a RefPtr, so the queue keeps the pool alive for its own lifetime.
MediaTaskQueue::MediaTaskQueue(TemporaryRef<SharedThreadPool> aPool)
  : mPool(aPool)
  , mQueueMonitor("MediaTaskQueue::Queue")
  , mIsRunning(false)
  , mIsShutdown(false)
{
  MOZ_COUNT_CTOR(MediaTaskQueue);
}
// Shutdown() must have completed before destruction; the assertion below
// enforces that contract.
MediaTaskQueue::~MediaTaskQueue()
{
  MonitorAutoLock mon(mQueueMonitor);
  MOZ_ASSERT(mIsShutdown);
  MOZ_COUNT_DTOR(MediaTaskQueue);
}
// Queue aRunnable for ordered execution on the pool.  Returns
// NS_ERROR_FAILURE once Shutdown() has begun, or the pool's dispatch error
// if no Runner could be scheduled.
nsresult
MediaTaskQueue::Dispatch(nsIRunnable* aRunnable)
{
  MonitorAutoLock mon(mQueueMonitor);
  if (mIsShutdown) {
    return NS_ERROR_FAILURE;
  }
  mTasks.push(aRunnable);
  if (mIsRunning) {
    // A Runner is already active (or scheduled) and will drain the queue,
    // including the task we just pushed.
    return NS_OK;
  }
  // No Runner in flight; dispatch one to the pool to start draining.
  RefPtr<nsIRunnable> runner(new Runner(this));
  nsresult rv = mPool->Dispatch(runner, NS_DISPATCH_NORMAL);
  if (NS_FAILED(rv)) {
    // NOTE(review): on failure the task stays in mTasks with no Runner
    // scheduled; a later successful Dispatch will drain it.
    NS_WARNING("Failed to dispatch runnable to run MediaTaskQueue");
    return rv;
  }
  mIsRunning = true;
  return NS_OK;
}
// Block the calling thread until no Runner is executing (mIsRunning is
// cleared by Runner::Run when the queue drains).
void
MediaTaskQueue::AwaitIdle()
{
  MonitorAutoLock mon(mQueueMonitor);
  AwaitIdleLocked();
}
// Wait until the Runner has drained the queue and cleared mIsRunning.
// mQueueMonitor must be held by the caller; Runner::Run notifies the
// monitor when it goes idle.
void
MediaTaskQueue::AwaitIdleLocked()
{
  mQueueMonitor.AssertCurrentThreadOwns();
  // If nothing is running, the queue must already be empty.
  MOZ_ASSERT(mIsRunning || mTasks.empty());
  while (mIsRunning) {
    mQueueMonitor.Wait();
  }
}
// Reject all further dispatches, then block until every already-queued
// task has finished executing (the Runner keeps draining until empty).
void
MediaTaskQueue::Shutdown()
{
  MonitorAutoLock mon(mQueueMonitor);
  mIsShutdown = true;
  AwaitIdleLocked();
}
// Drop all tasks that have not started yet, then block until the task
// currently running (if any) completes.
void
MediaTaskQueue::Flush()
{
  MonitorAutoLock mon(mQueueMonitor);
  // std::queue has no clear(); pop each pending RefPtr, releasing it.
  while (!mTasks.empty()) {
    mTasks.pop();
  }
  AwaitIdleLocked();
}
// Returns true when no tasks are waiting to run.  Note that a task may
// still be executing: Runner::Run pops a task before running it.
bool
MediaTaskQueue::IsEmpty()
{
  MonitorAutoLock mon(mQueueMonitor);
  return mTasks.empty();
}
// Executes exactly one task from the owning queue, then re-dispatches this
// Runner to the pool if more tasks remain.  When the queue drains, clears
// mIsRunning and notifies waiters (AwaitIdleLocked).
nsresult
MediaTaskQueue::Runner::Run()
{
  RefPtr<nsIRunnable> event;
  {
    MonitorAutoLock mon(mQueue->mQueueMonitor);
    MOZ_ASSERT(mQueue->mIsRunning);
    if (mQueue->mTasks.size() == 0) {
      // Queue drained (e.g. by Flush) before this Runner got to run;
      // go idle and wake any AwaitIdle waiters.
      mQueue->mIsRunning = false;
      mon.NotifyAll();
      return NS_OK;
    }
    // Pop the task before running it so IsEmpty() excludes the running
    // task, and so the task is released even if Flush runs concurrently.
    event = mQueue->mTasks.front();
    mQueue->mTasks.pop();
  }
  MOZ_ASSERT(event);

  // Note that dropping the queue monitor before running the task, and
  // taking the monitor again after the task has run ensures we have memory
  // fences enforced. This means that if the object we're calling wasn't
  // designed to be threadsafe, it will be, provided we're only calling it
  // in this task queue.
  event->Run();

  {
    MonitorAutoLock mon(mQueue->mQueueMonitor);
    if (mQueue->mTasks.size() == 0) {
      // No more events to run. Exit the task runner.
      mQueue->mIsRunning = false;
      mon.NotifyAll();
      return NS_OK;
    }
  }

  // There's at least one more event that we can run. Dispatch this Runner
  // to the thread pool again to ensure it runs again. Note that we don't just
  // run in a loop here so that we don't hog the thread pool. This means we may
  // run on another thread next time, but we rely on the memory fences from
  // mQueueMonitor for thread safety of non-threadsafe tasks.
  nsresult rv = mQueue->mPool->Dispatch(this, NS_DISPATCH_NORMAL);
  if (NS_FAILED(rv)) {
    // Failed to dispatch, shutdown!
    MonitorAutoLock mon(mQueue->mQueueMonitor);
    mQueue->mIsRunning = false;
    mQueue->mIsShutdown = true;
    mon.NotifyAll();
  }
  return NS_OK;
}
} // namespace mozilla

Просмотреть файл

@ -0,0 +1,82 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MediaTaskQueue_h_
#define MediaTaskQueue_h_
#include <queue>
#include "mozilla/RefPtr.h"
#include "mozilla/Monitor.h"
#include "nsThreadUtils.h"
class nsIRunnable;
namespace mozilla {
class SharedThreadPool;
// Abstracts executing runnables in order in a thread pool.  The runnables
// dispatched to the MediaTaskQueue will be executed in the order in which
// they're received, and are guaranteed to not be executed concurrently.
// They may be executed on different threads, and a memory barrier is used
// to make this threadsafe for objects that aren't already threadsafe.
class MediaTaskQueue : public AtomicRefCounted<MediaTaskQueue> {
public:
  // Takes a reference to the shared pool that will run the tasks.
  MediaTaskQueue(TemporaryRef<SharedThreadPool> aPool);
  ~MediaTaskQueue();

  // Queues aRunnable; fails with NS_ERROR_FAILURE once Shutdown() has begun.
  nsresult Dispatch(nsIRunnable* aRunnable);

  // Removes all pending tasks from the task queue, and blocks until
  // the currently running task (if any) finishes.
  void Flush();

  // Blocks until all tasks finish executing, then shuts down the task queue
  // and exits.
  void Shutdown();

  // Blocks until all tasks finish executing.
  void AwaitIdle();

  // True when no tasks are pending (one may still be running).
  bool IsEmpty();

private:
  // Blocks until all tasks finish executing. Called internally by methods
  // that need to wait until the task queue is idle.
  // mQueueMonitor must be held.
  void AwaitIdleLocked();

  RefPtr<SharedThreadPool> mPool;

  // Monitor that protects mTasks, mIsRunning and mIsShutdown.
  Monitor mQueueMonitor;

  // Queue of tasks to run.
  std::queue<RefPtr<nsIRunnable>> mTasks;

  // True if we've dispatched an event to the pool to execute events from
  // the queue.
  bool mIsRunning;

  // True if we've started our shutdown process.
  bool mIsShutdown;

  // Pool runnable that drains one task per dispatch (see Runner::Run in
  // the .cpp for the re-dispatch protocol).
  class Runner : public nsRunnable {
  public:
    // NOTE(review): single-argument constructor could be marked explicit.
    Runner(MediaTaskQueue* aQueue)
      : mQueue(aQueue)
    {
    }
    NS_METHOD Run() MOZ_OVERRIDE;
  private:
    // Strong reference keeps the queue alive while a Runner is in flight.
    RefPtr<MediaTaskQueue> mQueue;
  };
};
} // namespace mozilla
#endif // MediaTaskQueue_h_

Просмотреть файл

@ -12,8 +12,12 @@
#define AUDIO_READ_BYTES 4096
// Maximum number of audio frames we will accept from the audio decoder in one
// go.
#define MAX_AUDIO_FRAMES 4096
// go. Carefully select this to work well with both the mp3 1152 max frames
// per block and power-of-2 allocation sizes. Since we must pre-allocate the
// buffer we cannot use AudioCompactor without paying for an additional
// allocation and copy. Therefore, choosing a value that divides exactly into
// 1152 is most memory efficient.
#define MAX_AUDIO_FRAMES 128
namespace mozilla {
@ -201,7 +205,8 @@ AppleMP3Reader::AudioSampleCallback(UInt32 aNumBytes,
LOGD("got %u bytes, %u packets\n", aNumBytes, aNumPackets);
// 1 frame per packet * num channels * 32-bit float
uint32_t decodedSize = MAX_AUDIO_FRAMES * mAudioChannels * 4;
uint32_t decodedSize = MAX_AUDIO_FRAMES * mAudioChannels *
sizeof(AudioDataValue);
// descriptions for _decompressed_ audio packets. ignored.
nsAutoArrayPtr<AudioStreamPacketDescription>
@ -238,6 +243,14 @@ AppleMP3Reader::AudioSampleCallback(UInt32 aNumBytes,
break;
}
// If we decoded zero frames then AudiOConverterFillComplexBuffer is out
// of data to provide. We drained its internal buffer completely on the
// last pass.
if (numFrames == 0 && rv == kNeedMoreData) {
LOGD("FillComplexBuffer out of data exactly\n");
break;
}
int64_t time = FramesToUsecs(mCurrentAudioFrame, mAudioSampleRate).value();
int64_t duration = FramesToUsecs(numFrames, mAudioSampleRate).value();

Просмотреть файл

@ -1,4 +1,5 @@
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
@ -247,6 +248,48 @@ DirectShowReader::Finish(HRESULT aStatus)
return false;
}
class DirectShowCopy
{
public:
DirectShowCopy(uint8_t *aSource, uint32_t aBytesPerSample,
uint32_t aSamples, uint32_t aChannels)
: mSource(aSource)
, mBytesPerSample(aBytesPerSample)
, mSamples(aSamples)
, mChannels(aChannels)
, mNextSample(0)
{ }
uint32_t operator()(AudioDataValue *aBuffer, size_t aSamples)
{
size_t maxSamples = std::min(aSamples, mSamples - mNextSample);
uint32_t frames = maxSamples / mChannels;
size_t byteOffset = mNextSample * mBytesPerSample;
if (mBytesPerSample == 1) {
for (uint32_t i = 0; i < maxSamples; ++i) {
uint8_t *sample = mSource + byteOffset;
aBuffer[i] = UnsignedByteToAudioSample(*sample);
byteOffset += mBytesPerSample;
}
} else if (mBytesPerSample == 2) {
for (uint32_t i = 0; i < maxSamples; ++i) {
int16_t *sample = reinterpret_cast<int16_t *>(mSource + byteOffset);
aBuffer[i] = AudioSampleToFloat(*sample);
byteOffset += mBytesPerSample;
}
}
mNextSample = maxSamples;
return frames;
}
private:
uint8_t * const mSource;
const uint32_t mBytesPerSample;
const uint32_t mSamples;
const uint32_t mChannels;
uint32_t mNextSample;
};
bool
DirectShowReader::DecodeAudioData()
{
@ -281,26 +324,15 @@ DirectShowReader::DecodeAudioData()
hr = sample->GetPointer(&data);
NS_ENSURE_TRUE(SUCCEEDED(hr), Finish(hr));
nsAutoArrayPtr<AudioDataValue> buffer(new AudioDataValue[numSamples]);
AudioDataValue* dst = buffer.get();
if (mBytesPerSample == 1) {
uint8_t* src = reinterpret_cast<uint8_t*>(data);
for (int32_t i = 0; i < numSamples; ++i) {
dst[i] = UnsignedByteToAudioSample(src[i]);
}
} else if (mBytesPerSample == 2) {
int16_t* src = reinterpret_cast<int16_t*>(data);
for (int32_t i = 0; i < numSamples; ++i) {
dst[i] = AudioSampleToFloat(src[i]);
}
}
mAudioQueue.Push(new AudioData(mDecoder->GetResource()->Tell(),
RefTimeToUsecs(start),
RefTimeToUsecs(end - start),
numFrames,
buffer.forget(),
mNumChannels));
mAudioCompactor.Push(mDecoder->GetResource()->Tell(),
RefTimeToUsecs(start),
mInfo.mAudio.mRate,
numFrames,
mNumChannels,
DirectShowCopy(reinterpret_cast<uint8_t *>(data),
mBytesPerSample,
numSamples,
mNumChannels));
return true;
}

Просмотреть файл

@ -12,7 +12,7 @@
#include "VideoUtils.h"
#include "ImageContainer.h"
#include "mp4_demuxer/mp4_demuxer.h"
#include "mp4_demuxer/audio_decoder_config.h"
#include "MediaTaskQueue.h"
namespace mozilla {
@ -22,10 +22,12 @@ template<class BlankMediaDataCreator>
class BlankMediaDataDecoder : public MediaDataDecoder {
public:
BlankMediaDataDecoder(BlankMediaDataCreator* aCreator)
: mCreator(aCreator),
mNextTimeStamp(-1),
mNextOffset(-1)
BlankMediaDataDecoder(BlankMediaDataCreator* aCreator,
MediaTaskQueue* aTaskQueue,
MediaDataDecoderCallback* aCallback)
: mCreator(aCreator)
, mTaskQueue(aTaskQueue)
, mCallback(aCallback)
{
}
@ -37,56 +39,65 @@ public:
return NS_OK;
}
virtual DecoderStatus Input(nsAutoPtr<mp4_demuxer::MP4Sample>& aSample) MOZ_OVERRIDE
class OutputEvent : public nsRunnable {
public:
OutputEvent(mp4_demuxer::MP4Sample* aSample,
MediaDataDecoderCallback* aCallback,
BlankMediaDataCreator* aCreator)
: mSample(aSample)
, mCallback(aCallback)
, mCreator(aCreator)
{
}
NS_IMETHOD Run() MOZ_OVERRIDE
{
mCallback->Output(mCreator->Create(mSample->composition_timestamp,
mSample->duration,
mSample->byte_offset));
return NS_OK;
}
private:
nsAutoPtr<mp4_demuxer::MP4Sample> mSample;
BlankMediaDataCreator* mCreator;
MediaDataDecoderCallback* mCallback;
};
virtual nsresult Input(mp4_demuxer::MP4Sample* aSample) MOZ_OVERRIDE
{
// Accepts input, and outputs on the second input, using the difference
// in DTS as the duration.
if (mOutput) {
return DECODE_STATUS_NOT_ACCEPTING;
}
Microseconds timestamp = aSample->composition_timestamp;
if (mNextTimeStamp != -1 && mNextOffset != -1) {
Microseconds duration = timestamp - mNextTimeStamp;
mOutput = mCreator->Create(mNextTimeStamp, duration, mNextOffset);
}
mNextTimeStamp = timestamp;
mNextOffset = aSample->byte_offset;
return DECODE_STATUS_OK;
// The MediaDataDecoder must delete the sample when we're finished
// with it, so the OutputEvent stores it in an nsAutoPtr and deletes
// it once it's run.
RefPtr<nsIRunnable> r(new OutputEvent(aSample, mCallback, mCreator));
mTaskQueue->Dispatch(r);
return NS_OK;
}
virtual DecoderStatus Output(nsAutoPtr<MediaData>& aOutData) MOZ_OVERRIDE
{
if (!mOutput) {
return DECODE_STATUS_NEED_MORE_INPUT;
}
aOutData = mOutput.forget();
return DECODE_STATUS_OK;
virtual nsresult Flush() MOZ_OVERRIDE {
return NS_OK;
}
virtual DecoderStatus Flush() MOZ_OVERRIDE {
return DECODE_STATUS_OK;
virtual nsresult Drain() MOZ_OVERRIDE {
return NS_OK;
}
private:
nsAutoPtr<BlankMediaDataCreator> mCreator;
Microseconds mNextTimeStamp;
int64_t mNextOffset;
nsAutoPtr<MediaData> mOutput;
bool mHasInput;
RefPtr<MediaTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
};
static const uint32_t sFrameWidth = 320;
static const uint32_t sFrameHeight = 240;
class BlankVideoDataCreator {
public:
BlankVideoDataCreator(layers::ImageContainer* aImageContainer)
: mImageContainer(aImageContainer)
BlankVideoDataCreator(uint32_t aFrameWidth,
uint32_t aFrameHeight,
layers::ImageContainer* aImageContainer)
: mFrameWidth(aFrameWidth)
, mFrameHeight(aFrameHeight)
, mImageContainer(aImageContainer)
{
mInfo.mDisplay = nsIntSize(sFrameWidth, sFrameHeight);
mPicture = nsIntRect(0, 0, sFrameWidth, sFrameHeight);
mInfo.mDisplay = nsIntSize(mFrameWidth, mFrameHeight);
mPicture = nsIntRect(0, 0, mFrameWidth, mFrameHeight);
}
MediaData* Create(Microseconds aDTS,
@ -97,31 +108,31 @@ public:
// with a U and V plane that are half the size of the Y plane, i.e 8 bit,
// 2x2 subsampled. Have the data pointers of each frame point to the
// first plane, they'll always be zero'd memory anyway.
uint8_t* frame = new uint8_t[sFrameWidth * sFrameHeight];
memset(frame, 0, sFrameWidth * sFrameHeight);
uint8_t* frame = new uint8_t[mFrameWidth * mFrameHeight];
memset(frame, 0, mFrameWidth * mFrameHeight);
VideoData::YCbCrBuffer buffer;
// Y plane.
buffer.mPlanes[0].mData = frame;
buffer.mPlanes[0].mStride = sFrameWidth;
buffer.mPlanes[0].mHeight = sFrameHeight;
buffer.mPlanes[0].mWidth = sFrameWidth;
buffer.mPlanes[0].mStride = mFrameWidth;
buffer.mPlanes[0].mHeight = mFrameHeight;
buffer.mPlanes[0].mWidth = mFrameWidth;
buffer.mPlanes[0].mOffset = 0;
buffer.mPlanes[0].mSkip = 0;
// Cb plane.
buffer.mPlanes[1].mData = frame;
buffer.mPlanes[1].mStride = sFrameWidth / 2;
buffer.mPlanes[1].mHeight = sFrameHeight / 2;
buffer.mPlanes[1].mWidth = sFrameWidth / 2;
buffer.mPlanes[1].mStride = mFrameWidth / 2;
buffer.mPlanes[1].mHeight = mFrameHeight / 2;
buffer.mPlanes[1].mWidth = mFrameWidth / 2;
buffer.mPlanes[1].mOffset = 0;
buffer.mPlanes[1].mSkip = 0;
// Cr plane.
buffer.mPlanes[2].mData = frame;
buffer.mPlanes[2].mStride = sFrameWidth / 2;
buffer.mPlanes[2].mHeight = sFrameHeight / 2;
buffer.mPlanes[2].mWidth = sFrameWidth / 2;
buffer.mPlanes[2].mStride = mFrameWidth / 2;
buffer.mPlanes[2].mHeight = mFrameHeight / 2;
buffer.mPlanes[2].mWidth = mFrameWidth / 2;
buffer.mPlanes[2].mOffset = 0;
buffer.mPlanes[2].mSkip = 0;
@ -139,6 +150,8 @@ public:
private:
VideoInfo mInfo;
nsIntRect mPicture;
uint32_t mFrameWidth;
uint32_t mFrameHeight;
RefPtr<layers::ImageContainer> mImageContainer;
};
@ -205,18 +218,28 @@ public:
// Decode thread.
virtual MediaDataDecoder* CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer) MOZ_OVERRIDE {
BlankVideoDataCreator* decoder = new BlankVideoDataCreator(aImageContainer);
return new BlankMediaDataDecoder<BlankVideoDataCreator>(decoder);
layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE {
BlankVideoDataCreator* decoder = new BlankVideoDataCreator(aConfig.visible_rect().width(),
aConfig.visible_rect().height(),
aImageContainer);
return new BlankMediaDataDecoder<BlankVideoDataCreator>(decoder,
aVideoTaskQueue,
aCallback);
}
// Decode thread.
virtual MediaDataDecoder* CreateAACDecoder(const mp4_demuxer::AudioDecoderConfig& aConfig) MOZ_OVERRIDE {
virtual MediaDataDecoder* CreateAACDecoder(const mp4_demuxer::AudioDecoderConfig& aConfig,
MediaTaskQueue* aAudioTaskQueue,
MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE {
BlankAudioDataCreator* decoder =
new BlankAudioDataCreator(ChannelLayoutToChannelCount(aConfig.channel_layout()),
aConfig.samples_per_second(),
aConfig.bits_per_channel());
return new BlankMediaDataDecoder<BlankAudioDataCreator>(decoder);
return new BlankMediaDataDecoder<BlankAudioDataCreator>(decoder,
aAudioTaskQueue,
aCallback);
}
};

Просмотреть файл

@ -13,6 +13,8 @@
#include "mozilla/dom/HTMLMediaElement.h"
#include "ImageContainer.h"
#include "Layers.h"
#include "SharedThreadPool.h"
#include "mozilla/Preferences.h"
using mozilla::layers::Image;
using mozilla::layers::LayerManager;
@ -80,17 +82,44 @@ private:
};
MP4Reader::MP4Reader(AbstractMediaDecoder* aDecoder)
: MediaDecoderReader(aDecoder),
mLayersBackendType(layers::LayersBackend::LAYERS_NONE),
mHasAudio(false),
mHasVideo(false)
: MediaDecoderReader(aDecoder)
, mLayersBackendType(layers::LayersBackend::LAYERS_NONE)
, mAudio("MP4 audio decoder data", Preferences::GetUint("media.mp4-audio-decode-ahead", 2))
, mVideo("MP4 video decoder data", Preferences::GetUint("media.mp4-video-decode-ahead", 2))
, mLastReportedNumDecodedFrames(0)
{
MOZ_ASSERT(NS_IsMainThread(), "Must be on main thread.");
MOZ_COUNT_CTOR(MP4Reader);
}
MP4Reader::~MP4Reader()
{
MOZ_ASSERT(NS_IsMainThread(), "Must be on main thread.");
MOZ_COUNT_DTOR(MP4Reader);
Shutdown();
}
void
MP4Reader::Shutdown()
{
if (mAudio.mDecoder) {
Flush(kAudio);
mAudio.mDecoder->Shutdown();
mAudio.mDecoder = nullptr;
}
if (mAudio.mTaskQueue) {
mAudio.mTaskQueue->Shutdown();
mAudio.mTaskQueue = nullptr;
}
if (mVideo.mDecoder) {
Flush(kVideo);
mVideo.mDecoder->Shutdown();
mVideo.mDecoder = nullptr;
}
if (mVideo.mTaskQueue) {
mVideo.mTaskQueue->Shutdown();
mVideo.mTaskQueue = nullptr;
}
}
void
@ -129,6 +158,14 @@ MP4Reader::Init(MediaDecoderReader* aCloneDonor)
InitLayersBackendType();
mAudio.mTaskQueue = new MediaTaskQueue(
SharedThreadPool::Get(NS_LITERAL_CSTRING("MP4 Audio Decode")));
NS_ENSURE_TRUE(mAudio.mTaskQueue, NS_ERROR_FAILURE);
mVideo.mTaskQueue = new MediaTaskQueue(
SharedThreadPool::Get(NS_LITERAL_CSTRING("MP4 Video Decode")));
NS_ENSURE_TRUE(mVideo.mTaskQueue, NS_ERROR_FAILURE);
return NS_OK;
}
@ -140,42 +177,47 @@ MP4Reader::ReadMetadata(MediaInfo* aInfo,
NS_ENSURE_TRUE(ok, NS_ERROR_FAILURE);
const AudioDecoderConfig& audio = mDemuxer->AudioConfig();
mInfo.mAudio.mHasAudio = mHasAudio = mDemuxer->HasAudio() &&
audio.IsValidConfig();
mInfo.mAudio.mHasAudio = mAudio.mActive = mDemuxer->HasAudio() &&
audio.IsValidConfig();
// If we have audio, we *only* allow AAC to be decoded.
if (mHasAudio && audio.codec() != kCodecAAC) {
if (HasAudio() && audio.codec() != kCodecAAC) {
return NS_ERROR_FAILURE;
}
const VideoDecoderConfig& video = mDemuxer->VideoConfig();
mInfo.mVideo.mHasVideo = mHasVideo = mDemuxer->HasVideo() &&
video.IsValidConfig();
mInfo.mVideo.mHasVideo = mVideo.mActive = mDemuxer->HasVideo() &&
video.IsValidConfig();
// If we have video, we *only* allow H.264 to be decoded.
if (mHasVideo && video.codec() != kCodecH264) {
if (HasVideo() && video.codec() != kCodecH264) {
return NS_ERROR_FAILURE;
}
mPlatform = PlatformDecoderModule::Create();
NS_ENSURE_TRUE(mPlatform, NS_ERROR_FAILURE);
if (mHasAudio) {
if (HasAudio()) {
mInfo.mAudio.mRate = audio.samples_per_second();
mInfo.mAudio.mChannels = ChannelLayoutToChannelCount(audio.channel_layout());
mAudioDecoder = mPlatform->CreateAACDecoder(audio);
NS_ENSURE_TRUE(mAudioDecoder != nullptr, NS_ERROR_FAILURE);
nsresult rv = mAudioDecoder->Init();
mAudio.mCallback = new DecoderCallback(this, kAudio);
mAudio.mDecoder = mPlatform->CreateAACDecoder(audio,
mAudio.mTaskQueue,
mAudio.mCallback);
NS_ENSURE_TRUE(mAudio.mDecoder != nullptr, NS_ERROR_FAILURE);
nsresult rv = mAudio.mDecoder->Init();
NS_ENSURE_SUCCESS(rv, rv);
}
mInfo.mVideo.mHasVideo = mHasVideo = mDemuxer->HasVideo();
if (mHasVideo) {
if (HasVideo()) {
IntSize sz = video.natural_size();
mInfo.mVideo.mDisplay = nsIntSize(sz.width(), sz.height());
mVideoDecoder = mPlatform->CreateH264Decoder(video,
mLayersBackendType,
mDecoder->GetImageContainer());
NS_ENSURE_TRUE(mVideoDecoder != nullptr, NS_ERROR_FAILURE);
nsresult rv = mVideoDecoder->Init();
mVideo.mCallback = new DecoderCallback(this, kVideo);
mVideo.mDecoder = mPlatform->CreateH264Decoder(video,
mLayersBackendType,
mDecoder->GetImageContainer(),
mVideo.mTaskQueue,
mVideo.mCallback);
NS_ENSURE_TRUE(mVideo.mDecoder != nullptr, NS_ERROR_FAILURE);
nsresult rv = mVideo.mDecoder->Init();
NS_ENSURE_SUCCESS(rv, rv);
}
@ -200,29 +242,32 @@ MP4Reader::ReadMetadata(MediaInfo* aInfo,
bool
MP4Reader::HasAudio()
{
return mHasAudio;
return mAudio.mActive;
}
bool
MP4Reader::HasVideo()
{
return mHasVideo;
return mVideo.mActive;
}
MP4Reader::DecoderData&
MP4Reader::GetDecoderData(TrackType aTrack)
{
MOZ_ASSERT(aTrack == kAudio || aTrack == kVideo);
return (aTrack == kAudio) ? mAudio : mVideo;
}
MP4SampleQueue&
MP4Reader::SampleQueue(TrackType aTrack)
{
MOZ_ASSERT(aTrack == kAudio || aTrack == kVideo);
return (aTrack == kAudio) ? mCompressedAudioQueue
: mCompressedVideoQueue;
return GetDecoderData(aTrack).mDemuxedSamples;
}
MediaDataDecoder*
MP4Reader::Decoder(mp4_demuxer::TrackType aTrack)
{
MOZ_ASSERT(aTrack == kAudio || aTrack == kVideo);
return (aTrack == kAudio) ? mAudioDecoder
: mVideoDecoder;
return GetDecoderData(aTrack).mDecoder;
}
MP4Sample*
@ -250,81 +295,180 @@ MP4Reader::PopSample(TrackType aTrack)
return sample;
}
// How async decoding works:
//
// When MP4Reader::Decode() is called:
// * Lock the DecoderData. We assume the state machine wants
// output from the decoder (in future, we'll assume decoder wants input
// when the output MediaQueue isn't "full").
// * Cache the value of mNumSamplesOutput, as prevFramesOutput.
// * While we've not output data (mNumSamplesOutput != prevNumFramesOutput)
// and while we still require input, we demux and input data in the reader.
// We assume we require input if
// ((mNumSamplesInput - mNumSamplesOutput) < sDecodeAheadMargin) or
// mInputExhausted is true. Before we send input, we reset mInputExhausted
// and increment mNumFrameInput, and drop the lock on DecoderData.
// * Once we no longer require input, we wait on the DecoderData
// lock for output, or for the input exhausted callback. If we receive the
// input exhausted callback, we go back and input more data.
// * When our output callback is called, we take the DecoderData lock and
// increment mNumSamplesOutput. We notify the DecoderData lock. This will
// awaken the Decode thread, and unblock it, and it will return.
bool
MP4Reader::Decode(TrackType aTrack, nsAutoPtr<MediaData>& aOutData)
MP4Reader::Decode(TrackType aTrack)
{
MP4SampleQueue& sampleQueue = SampleQueue(aTrack);
MediaDataDecoder* decoder = Decoder(aTrack);
DecoderData& data = GetDecoderData(aTrack);
MOZ_ASSERT(data.mDecoder);
MOZ_ASSERT(decoder);
// Loop until we hit a return condition; we produce samples, or hit an error.
while (true) {
DecoderStatus status = decoder->Output(aOutData);
if (status == DECODE_STATUS_OK) {
MOZ_ASSERT(aOutData);
return true;
}
// |aOutData| should only be non-null in success case.
MOZ_ASSERT(!aOutData);
if (status == DECODE_STATUS_ERROR) {
data.mMonitor.Lock();
uint64_t prevNumFramesOutput = data.mNumSamplesOutput;
while (prevNumFramesOutput == data.mNumSamplesOutput) {
data.mMonitor.AssertCurrentThreadOwns();
if (data.mError) {
// Decode error!
data.mMonitor.Unlock();
return false;
}
if (status == DECODE_STATUS_NEED_MORE_INPUT) {
// We need to push more data from the demuxer into the decoder.
// Now loop back and try to extract output again.
nsAutoPtr<MP4Sample> compressed;
do {
compressed = PopSample(aTrack);
if (!compressed) {
// EOS, or error. Let the state machine know there are no more
// frames coming.
return false;
}
status = decoder->Input(compressed);
} while (status == DECODE_STATUS_OK);
if (status == DECODE_STATUS_NOT_ACCEPTING) {
// Decoder should now be able to produce an output.
if (compressed != nullptr) {
// Decoder didn't consume data, attempt to decode the same
// sample next time.
SampleQueue(aTrack).push_front(compressed.forget());
}
continue;
// Send input to the decoder, if we need to. We assume the decoder
// needs input if it's told us it's out of input, or we're beneath the
// "low water mark" for the amount of input we've sent it vs the amount
// out output we've received. We always try to keep the decoder busy if
// possible, so we try to maintain at least a few input samples ahead,
// if we need output.
while (prevNumFramesOutput == data.mNumSamplesOutput &&
(data.mInputExhausted ||
(data.mNumSamplesInput - data.mNumSamplesOutput) < data.mDecodeAhead)) {
data.mMonitor.AssertCurrentThreadOwns();
data.mMonitor.Unlock();
nsAutoPtr<MP4Sample> compressed = PopSample(aTrack);
if (!compressed) {
// EOS, or error. Let the state machine know there are no more
// frames coming.
return false;
}
LOG("MP4Reader decode failure. track=%d status=%d\n", aTrack, status);
return false;
} else {
LOG("MP4Reader unexpected error. track=%d status=%d\n", aTrack, status);
return false;
data.mMonitor.Lock();
data.mInputExhausted = false;
data.mNumSamplesInput++;
data.mMonitor.Unlock();
if (NS_FAILED(data.mDecoder->Input(compressed))) {
return false;
}
// If Input() failed, we let the auto pointer delete |compressed|.
// Otherwise, we assume the decoder will delete it when it's finished
// with it.
compressed.forget();
data.mMonitor.Lock();
}
data.mMonitor.AssertCurrentThreadOwns();
while (!data.mError &&
prevNumFramesOutput == data.mNumSamplesOutput &&
!data.mInputExhausted ) {
data.mMonitor.Wait();
}
}
data.mMonitor.AssertCurrentThreadOwns();
data.mMonitor.Unlock();
return true;
}
static const char*
TrackTypeToStr(TrackType aTrack)
{
MOZ_ASSERT(aTrack == kAudio || aTrack == kVideo);
switch (aTrack) {
case kAudio: return "Audio";
case kVideo: return "Video";
default: return "Unknown";
}
}
void
MP4Reader::Output(mp4_demuxer::TrackType aTrack, MediaData* aSample)
{
#ifdef LOG_SAMPLE_DECODE
LOG("Decoded %s sample time=%lld dur=%lld",
TrackTypeToStr(aTrack), aSample->mTime, aSample->mDuration);
#endif
DecoderData& data = GetDecoderData(aTrack);
// Don't accept output while we're flushing.
MonitorAutoLock mon(data.mMonitor);
if (data.mIsFlushing) {
mon.NotifyAll();
return;
}
switch (aTrack) {
case kAudio: {
MOZ_ASSERT(aSample->mType == MediaData::AUDIO_SAMPLES);
AudioQueue().Push(static_cast<AudioData*>(aSample));
break;
}
case kVideo: {
MOZ_ASSERT(aSample->mType == MediaData::VIDEO_FRAME);
VideoQueue().Push(static_cast<VideoData*>(aSample));
break;
}
default:
break;
}
data.mNumSamplesOutput++;
mon.NotifyAll();
}
void
MP4Reader::InputExhausted(mp4_demuxer::TrackType aTrack)
{
DecoderData& data = GetDecoderData(aTrack);
MonitorAutoLock mon(data.mMonitor);
data.mInputExhausted = true;
mon.NotifyAll();
}
void
MP4Reader::Error(mp4_demuxer::TrackType aTrack)
{
DecoderData& data = GetDecoderData(aTrack);
MonitorAutoLock mon(data.mMonitor);
data.mError = true;
mon.NotifyAll();
}
bool
MP4Reader::DecodeAudioData()
{
MOZ_ASSERT(mHasAudio && mPlatform && mAudioDecoder);
nsAutoPtr<MediaData> audio;
bool ok = Decode(kAudio, audio);
if (ok && audio && audio->mType == MediaData::AUDIO_SAMPLES) {
#ifdef LOG_SAMPLE_DECODE
LOG("DecodeAudioData time=%lld dur=%lld", audio->mTime, audio->mDuration);
#endif
mAudioQueue.Push(static_cast<AudioData*>(audio.forget()));
MOZ_ASSERT(HasAudio() && mPlatform && mAudio.mDecoder);
return Decode(kAudio);
}
void
MP4Reader::Flush(mp4_demuxer::TrackType aTrack)
{
DecoderData& data = GetDecoderData(aTrack);
if (!data.mDecoder) {
return;
}
// Purge the current decoder's state.
// Set a flag so that we ignore all output while we call
// MediaDataDecoder::Flush().
{
data.mIsFlushing = true;
MonitorAutoLock mon(data.mMonitor);
}
data.mDecoder->Flush();
{
data.mIsFlushing = false;
MonitorAutoLock mon(data.mMonitor);
}
return ok;
}
bool
MP4Reader::SkipVideoDemuxToNextKeyFrame(int64_t aTimeThreshold, uint32_t& parsed)
{
MOZ_ASSERT(mVideoDecoder);
MOZ_ASSERT(mVideo.mDecoder);
// Purge the current decoder's state.
mVideoDecoder->Flush();
Flush(kVideo);
// Loop until we reach the next keyframe after the threshold.
while (true) {
@ -338,7 +482,7 @@ MP4Reader::SkipVideoDemuxToNextKeyFrame(int64_t aTimeThreshold, uint32_t& parsed
compressed->composition_timestamp < aTimeThreshold) {
continue;
}
mCompressedVideoQueue.push_front(compressed.forget());
mVideo.mDemuxedSamples.push_front(compressed.forget());
break;
}
@ -354,7 +498,7 @@ MP4Reader::DecodeVideoFrame(bool &aKeyframeSkip,
uint32_t parsed = 0, decoded = 0;
AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);
MOZ_ASSERT(mHasVideo && mPlatform && mVideoDecoder);
MOZ_ASSERT(HasVideo() && mPlatform && mVideo.mDecoder);
if (aKeyframeSkip) {
bool ok = SkipVideoDemuxToNextKeyFrame(aTimeThreshold, parsed);
@ -363,25 +507,20 @@ MP4Reader::DecodeVideoFrame(bool &aKeyframeSkip,
return false;
}
aKeyframeSkip = false;
nsresult rv = mVideo.mDecoder->Flush();
NS_ENSURE_SUCCESS(rv, false);
}
nsAutoPtr<MediaData> data;
bool ok = Decode(kVideo, data);
MOZ_ASSERT(!data || data->mType == MediaData::VIDEO_FRAME);
if (ok && data) {
parsed++;
if (data->mTime < aTimeThreshold) {
// Skip frame, it's too late to be displayed.
return true;
}
decoded++;
VideoData* video = static_cast<VideoData*>(data.forget());
#ifdef LOG_SAMPLE_DECODE
LOG("DecodeVideoData time=%lld dur=%lld", video->mTime, video->mDuration);
#endif
mVideoQueue.Push(video);
bool rv = Decode(kVideo);
{
// Report the number of "decoded" frames as the difference in the
// mNumSamplesOutput field since the last time we were called.
MonitorAutoLock mon(mVideo.mMonitor);
uint64_t delta = mVideo.mNumSamplesOutput - mLastReportedNumDecodedFrames;
decoded = static_cast<uint32_t>(delta);
mLastReportedNumDecodedFrames = mVideo.mNumSamplesOutput;
}
return ok;
return rv;
}
nsresult
@ -396,24 +535,4 @@ MP4Reader::Seek(int64_t aTime,
return NS_ERROR_NOT_IMPLEMENTED;
}
void
MP4Reader::OnDecodeThreadStart()
{
MOZ_ASSERT(!NS_IsMainThread(), "Must not be on main thread.");
MOZ_ASSERT(mDecoder->OnDecodeThread(), "Should be on decode thread.");
if (mPlatform) {
mPlatform->OnDecodeThreadStart();
}
}
void
MP4Reader::OnDecodeThreadFinish()
{
MOZ_ASSERT(!NS_IsMainThread(), "Must not be on main thread.");
MOZ_ASSERT(mDecoder->OnDecodeThread(), "Should be on decode thread.");
if (mPlatform) {
mPlatform->OnDecodeThreadFinish();
}
}
} // namespace mozilla

Просмотреть файл

@ -12,6 +12,7 @@
#include "PlatformDecoderModule.h"
#include "mp4_demuxer/mp4_demuxer.h"
#include "mp4_demuxer/box_definitions.h"
#include "MediaTaskQueue.h"
#include <deque>
#include "mozilla/Monitor.h"
@ -49,40 +50,103 @@ public:
int64_t aStartTime,
int64_t aEndTime,
int64_t aCurrentTime) MOZ_OVERRIDE;
virtual void OnDecodeThreadStart() MOZ_OVERRIDE;
virtual void OnDecodeThreadFinish() MOZ_OVERRIDE;
private:
// Destroys all decoder resources.
void Shutdown();
// Initializes mLayersBackendType if possible.
void InitLayersBackendType();
MP4SampleQueue& SampleQueue(mp4_demuxer::TrackType aTrack);
// Blocks until the demuxer produces an sample of specified type.
// Returns nullptr on error on EOS. Caller must delete sample.
mp4_demuxer::MP4Sample* PopSample(mp4_demuxer::TrackType aTrack);
bool Decode(mp4_demuxer::TrackType aTrack,
nsAutoPtr<MediaData>& aOutData);
MediaDataDecoder* Decoder(mp4_demuxer::TrackType aTrack);
bool SkipVideoDemuxToNextKeyFrame(int64_t aTimeThreshold, uint32_t& parsed);
void Output(mp4_demuxer::TrackType aType, MediaData* aSample);
void InputExhausted(mp4_demuxer::TrackType aTrack);
void Error(mp4_demuxer::TrackType aTrack);
bool Decode(mp4_demuxer::TrackType aTrack);
void Flush(mp4_demuxer::TrackType aTrack);
nsAutoPtr<mp4_demuxer::MP4Demuxer> mDemuxer;
nsAutoPtr<MP4Stream> mMP4Stream;
nsAutoPtr<PlatformDecoderModule> mPlatform;
nsAutoPtr<MediaDataDecoder> mVideoDecoder;
nsAutoPtr<MediaDataDecoder> mAudioDecoder;
MP4SampleQueue mCompressedAudioQueue;
MP4SampleQueue mCompressedVideoQueue;
class DecoderCallback : public MediaDataDecoderCallback {
public:
DecoderCallback(MP4Reader* aReader,
mp4_demuxer::TrackType aType)
: mReader(aReader)
, mType(aType)
{
}
virtual void Output(MediaData* aSample) MOZ_OVERRIDE {
mReader->Output(mType, aSample);
}
virtual void InputExhausted() MOZ_OVERRIDE {
mReader->InputExhausted(mType);
}
virtual void Error() MOZ_OVERRIDE {
mReader->Error(mType);
}
private:
MP4Reader* mReader;
mp4_demuxer::TrackType mType;
};
struct DecoderData {
DecoderData(const char* aMonitorName,
uint32_t aDecodeAhead)
: mMonitor(aMonitorName)
, mNumSamplesInput(0)
, mNumSamplesOutput(0)
, mDecodeAhead(aDecodeAhead)
, mActive(false)
, mInputExhausted(false)
, mError(false)
, mIsFlushing(false)
{
}
// The platform decoder.
RefPtr<MediaDataDecoder> mDecoder;
// Queue of input extracted by the demuxer, but not yet sent to the
// platform decoder.
MP4SampleQueue mDemuxedSamples;
// TaskQueue on which decoder can choose to decode.
// Only non-null up until the decoder is created.
RefPtr<MediaTaskQueue> mTaskQueue;
// Callback that receives output and error notifications from the decoder.
nsAutoPtr<DecoderCallback> mCallback;
// Monitor that protects all non-threadsafe state; the primitives
// that follow.
Monitor mMonitor;
uint64_t mNumSamplesInput;
uint64_t mNumSamplesOutput;
uint32_t mDecodeAhead;
// Whether this stream exists in the media.
bool mActive;
bool mInputExhausted;
bool mError;
bool mIsFlushing;
};
DecoderData mAudio;
DecoderData mVideo;
// The last number of decoded output frames that we've reported to
// MediaDecoder::NotifyDecoded(). We diff the number of output video
// frames every time that DecodeVideoData() is called, and report the
// delta there.
uint64_t mLastReportedNumDecodedFrames;
DecoderData& GetDecoderData(mp4_demuxer::TrackType aTrack);
MP4SampleQueue& SampleQueue(mp4_demuxer::TrackType aTrack);
MediaDataDecoder* Decoder(mp4_demuxer::TrackType aTrack);
layers::LayersBackend mLayersBackendType;
bool mHasAudio;
bool mHasVideo;
};
} // namespace mozilla

Просмотреть файл

@ -10,6 +10,8 @@
#include "MediaDecoderReader.h"
#include "mozilla/layers/LayersTypes.h"
#include "nsTArray.h"
#include "mozilla/RefPtr.h"
#include <queue>
namespace mp4_demuxer {
class VideoDecoderConfig;
@ -17,6 +19,8 @@ class AudioDecoderConfig;
struct MP4Sample;
}
class nsIThreadPool;
namespace mozilla {
namespace layers {
@ -24,21 +28,28 @@ class ImageContainer;
}
class MediaDataDecoder;
class MediaDataDecoderCallback;
class MediaInputQueue;
class MediaTaskQueue;
typedef int64_t Microseconds;
// The PlatformDecoderModule interface is used by the MP4Reader to abstract
// access to the H264 and AAC decoders provided by various platforms. It
// may be extended to support other codecs in future. Each platform (Windows,
// MacOSX, Linux etc) must implement a PlatformDecoderModule to provide access
// to its decoders in order to get decompressed H.264/AAC from the MP4Reader.
// MacOSX, Linux, B2G etc) must implement a PlatformDecoderModule to provide
// access to its decoders in order to get decompressed H.264/AAC from the
// MP4Reader.
//
// Video decoding is asynchronous, and should be performed on the task queue
// provided if the underlying platform isn't already exposing an async API.
//
// Platforms that don't have a corresponding PlatformDecoderModule won't be
// able to play the H.264/AAC data output by the MP4Reader. In practice this
// means that we won't have fragmented MP4 supported in Media Source
// Extensions on platforms without PlatformDecoderModules.
// Extensions.
//
// A cross-platform decoder module that discards input and produces "blank"
// output samples exists for testing, and is created if the pref
// output samples exists for testing, and is created when the pref
// "media.fragmented-mp4.use-blank-decoder" is true.
class PlatformDecoderModule {
public:
@ -50,8 +61,8 @@ public:
// the platform we're running on. Caller is responsible for deleting this
// instance. It's expected that there will be multiple
// PlatformDecoderModules alive at the same time. There is one
// PlatformDecoderModule's created per MP4Reader.
// This is called on the main thread.
// PlatformDecoderModule created per MP4Reader.
// This is called on the decode thread.
static PlatformDecoderModule* Create();
// Called to shutdown the decoder module and cleanup state. This should
@ -61,122 +72,118 @@ public:
// Called on the main thread only.
virtual nsresult Shutdown() = 0;
// Creates and initializes an H.264 decoder. The layers backend is
// passed in so that decoders can determine whether hardware accelerated
// decoding can be used. Returns nullptr if the decoder can't be
// initialized.
// Creates an H.264 decoder. The layers backend is passed in so that
// decoders can determine whether hardware accelerated decoding can be used.
// Asynchronous decoding of video should be done in runnables dispatched
// to aVideoTaskQueue. If the task queue isn't needed, the decoder should
// not hold a reference to it.
// Output and errors should be returned to the reader via aCallback.
// On Windows the task queue's threads in have MSCOM initialized with
// COINIT_MULTITHREADED.
// Returns nullptr if the decoder can't be created.
// It is safe to store a reference to aConfig.
// Called on decode thread.
virtual MediaDataDecoder* CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer) = 0;
layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback) = 0;
// Creates and initializes an AAC decoder with the specified properties.
// The raw AAC AudioSpecificConfig as contained in the esds box. Some
// decoders need that to initialize. The caller owns the AAC config,
// so it must be copied if it is to be retained by the decoder.
// Returns nullptr if the decoder can't be initialized.
// Creates an AAC decoder with the specified properties.
// Asynchronous decoding of audio should be done in runnables dispatched to
// aAudioTaskQueue. If the task queue isn't needed, the decoder should
// not hold a reference to it.
// Output and errors should be returned to the reader via aCallback.
// Returns nullptr if the decoder can't be created.
// On Windows the task queue's threads in have MSCOM initialized with
// COINIT_MULTITHREADED.
// It is safe to store a reference to aConfig.
// Called on decode thread.
virtual MediaDataDecoder* CreateAACDecoder(const mp4_demuxer::AudioDecoderConfig& aConfig) = 0;
// Called when a decode thread is started. Called on decode thread.
virtual void OnDecodeThreadStart() {}
// Called just before a decode thread is finishing. Called on decode thread.
virtual void OnDecodeThreadFinish() {}
virtual MediaDataDecoder* CreateAACDecoder(const mp4_demuxer::AudioDecoderConfig& aConfig,
MediaTaskQueue* aAudioTaskQueue,
MediaDataDecoderCallback* aCallback) = 0;
virtual ~PlatformDecoderModule() {}
protected:
PlatformDecoderModule() {}
// Caches pref media.fragmented-mp4.use-blank-decoder
static bool sUseBlankDecoder;
};
// Return value of the MediaDataDecoder functions.
enum DecoderStatus {
DECODE_STATUS_NOT_ACCEPTING, // Can't accept input at this time. Decoder can produce output.
DECODE_STATUS_NEED_MORE_INPUT, // Can't produce output. Decoder can accept input.
DECODE_STATUS_OK,
DECODE_STATUS_ERROR
// A callback used by MediaDataDecoder to return output/errors to the
// MP4Reader. Implementation is threadsafe, and can be called on any thread.
class MediaDataDecoderCallback {
public:
virtual ~MediaDataDecoderCallback() {}
// Called by MediaDataDecoder when a sample has been decoded. Callee is
// responsibile for deleting aData.
virtual void Output(MediaData* aData) = 0;
// Denotes an error in the decoding process. The reader will stop calling
// the decoder.
virtual void Error() = 0;
// Denotes that the last input sample has been inserted into the decoder,
// and no more output can be produced unless more input is sent.
virtual void InputExhausted() = 0;
};
// MediaDataDecoder is the interface exposed by decoders created by the
// PlatformDecoderModule's Create*Decoder() functions. The type of
// media data that the decoder accepts as valid input and produces as
// output is determined when the MediaDataDecoder is created.
// The decoder is assumed to be in one of three mutually exclusive and
// implicit states: able to accept input, able to produce output, and
// shutdown. The decoder is assumed to be able to accept input by the time
// that it's returned by PlatformDecoderModule::Create*Decoder().
class MediaDataDecoder {
//
// All functions must be threadsafe, and be able to be called on an
// arbitrary thread.
//
// Decoding is done asynchronously. Any async work can be done on the
// MediaTaskQueue passed into the PlatformDecoderModules's Create*Decoder()
// function. This may not be necessary for platforms with async APIs
// for decoding.
class MediaDataDecoder : public AtomicRefCounted<MediaDataDecoder> {
public:
virtual ~MediaDataDecoder() {};
// Initialize the decoder. The decoder should be ready to decode after
// this returns. The decoder should do any initialization here, rather
// than in its constructor, so that if the MP4Reader needs to Shutdown()
// during initialization it can call Shutdown() to cancel this.
// Any initialization that requires blocking the calling thread *must*
// than in its constructor or PlatformDecoderModule::Create*Decoder(),
// so that if the MP4Reader needs to shutdown during initialization,
// it can call Shutdown() to cancel this operation. Any initialization
// that requires blocking the calling thread in this function *must*
// be done here so that it can be canceled by calling Shutdown()!
virtual nsresult Init() = 0;
// Inserts aData into the decoding pipeline. Decoding may begin
// asynchronously.
//
// If the decoder needs to assume ownership of the sample it may do so by
// calling forget() on aSample.
//
// If Input() returns DECODE_STATUS_NOT_ACCEPTING without forget()ing
// aSample, then the next call will have the same aSample. Otherwise
// the caller will delete aSample after Input() returns.
//
// The MP4Reader calls Input() in a loop until Input() stops returning
// DECODE_STATUS_OK. Input() should return DECODE_STATUS_NOT_ACCEPTING
// once the underlying decoder should have enough data to output decoded
// data.
//
// Called on the media decode thread.
// Returns:
// - DECODE_STATUS_OK if input was successfully inserted into
// the decode pipeline.
// - DECODE_STATUS_NOT_ACCEPTING if the decoder cannot accept any input
// at this time. The MP4Reader will assume that the decoder can now
// produce one or more output samples, and call the Output() function.
// The MP4Reader will call Input() again with the same data later,
// after the decoder's Output() function has stopped producing output,
// except if Input() called forget() on aSample, whereupon a new sample
// will come in next call.
// - DECODE_STATUS_ERROR if the decoder has been shutdown, or some
// unspecified error.
// This function should not return DECODE_STATUS_NEED_MORE_INPUT.
virtual DecoderStatus Input(nsAutoPtr<mp4_demuxer::MP4Sample>& aSample) = 0;
// Blocks until a decoded sample is produced by the deoder. The MP4Reader
// calls this until it stops returning DECODE_STATUS_OK.
// Called on the media decode thread.
// Returns:
// - DECODE_STATUS_OK if an output sample was successfully placed in
// aOutData. More samples for output may still be available, the
// MP4Reader will call again to check.
// - DECODE_STATUS_NEED_MORE_INPUT if the decoder needs more input to
// produce a sample. The decoder should return this once it can no
// longer produce output. This signals to the MP4Reader that it should
// start feeding in data via the Input() function.
// - DECODE_STATUS_ERROR if the decoder has been shutdown, or some
// unspecified error.
// This function should not return DECODE_STATUS_NOT_ACCEPTING.
virtual DecoderStatus Output(nsAutoPtr<MediaData>& aOutData) = 0;
// Inserts a sample into the decoder's decode pipeline. The decoder must
// delete the sample once its been decoded. If Input() returns an error,
// aSample will be deleted by the caller.
virtual nsresult Input(mp4_demuxer::MP4Sample* aSample) = 0;
// Causes all samples in the decoding pipeline to be discarded. When
// this function returns, the decoder must be ready to accept new input
// for decoding. This function is called when the demuxer seeks, before
// decoding resumes after the seek.
// Called on the media decode thread.
virtual DecoderStatus Flush() = 0;
// While the reader calls Flush(), it ignores all output sent to it;
// it is safe (but pointless) to send output while Flush is called.
// The MP4Reader will not call Input() while it's calling Flush().
virtual nsresult Flush() = 0;
// Cancels all decode operations, and shuts down the decoder. This should
// block until shutdown is complete. The decoder should return
// DECODE_STATUS_ERROR for all calls to its functions once this is called.
// Called on main thread.
// Causes all complete samples in the pipeline that can be decoded to be
// output. If the decoder can't produce samples from the current output,
// it drops the input samples. The decoder may be holding onto samples
// that are required to decode samples that it expects to get in future.
// This is called when the demuxer reaches end of stream.
// The MP4Reader will not call Input() while it's calling Drain().
virtual nsresult Drain() = 0;
// Cancels all init/input/drain operations, and shuts down the
// decoder. The platform decoder should clean up any resources it's using
// and release memory etc. Shutdown() must block until the decoder has
// completed shutdown. The reader calls Flush() before calling Shutdown().
// The reader will delete the decoder once Shutdown() returns.
// The MediaDataDecoderCallback *must* not be called after Shutdown() has
// returned.
virtual nsresult Shutdown() = 0;
};

Просмотреть файл

@ -54,15 +54,17 @@ UNIFIED_SOURCES += [
if CONFIG['MOZ_WMF']:
EXPORTS += [
'wmf/MFTDecoder.h',
'wmf/WMFAudioDecoder.h',
'wmf/WMFAudioOutputSource.h',
'wmf/WMFDecoderModule.h',
'wmf/WMFVideoDecoder.h',
'wmf/WMFMediaDataDecoder.h',
'wmf/WMFVideoOutputSource.h',
]
UNIFIED_SOURCES += [
'wmf/MFTDecoder.cpp',
'wmf/WMFAudioDecoder.cpp',
'wmf/WMFAudioOutputSource.cpp',
'wmf/WMFDecoderModule.cpp',
'wmf/WMFVideoDecoder.cpp',
'wmf/WMFMediaDataDecoder.cpp',
'wmf/WMFVideoOutputSource.cpp',
]
FINAL_LIBRARY = 'gklayout'

Просмотреть файл

@ -14,7 +14,7 @@
namespace mozilla {
class MFTDecoder {
class MFTDecoder : public AtomicRefCounted<MFTDecoder> {
public:
MFTDecoder();
~MFTDecoder();

Просмотреть файл

@ -1,74 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(WMFAudioDecoder_h_)
#define WMFAudioDecoder_h_
#include "WMF.h"
#include "MP4Reader.h"
#include "MFTDecoder.h"
namespace mozilla {
class WMFAudioDecoder : public MediaDataDecoder {
public:
WMFAudioDecoder(uint32_t aChannelCount,
uint32_t aSampleRate,
uint16_t aBitsPerSample,
const uint8_t* aUserData,
uint32_t aUserDataLength);
virtual nsresult Init() MOZ_OVERRIDE;
virtual nsresult Shutdown() MOZ_OVERRIDE;
// Inserts data into the decoder's pipeline.
virtual DecoderStatus Input(nsAutoPtr<mp4_demuxer::MP4Sample>& aSample);
// Blocks until a decoded sample is produced by the decoder.
virtual DecoderStatus Output(nsAutoPtr<MediaData>& aOutData);
virtual DecoderStatus Flush() MOZ_OVERRIDE;
private:
// A helper for Output() above. This has the same interface as Output()
// above, except that it returns DECODE_STATUS_OK and sets aOutData to
// nullptr when all the output samples have been stripped due to having
// negative timestamps. WMF's AAC decoder sometimes output negatively
// timestamped samples, presumably they're the preroll samples, and we
// strip them.
DecoderStatus OutputNonNegativeTimeSamples(nsAutoPtr<MediaData>& aOutData);
nsAutoPtr<MFTDecoder> mDecoder;
uint32_t mAudioChannels;
uint32_t mAudioBytesPerSample;
uint32_t mAudioRate;
nsTArray<BYTE> mUserData;
// The last offset into the media resource that was passed into Input().
// This is used to approximate the decoder's position in the media resource.
int64_t mLastStreamOffset;
// The offset, in audio frames, at which playback started since the
// last discontinuity.
int64_t mAudioFrameOffset;
// The number of audio frames that we've played since the last
// discontinuity.
int64_t mAudioFrameSum;
// True if we need to re-initialize mAudioFrameOffset and mAudioFrameSum
// from the next audio packet we decode. This happens after a seek, since
// WMF doesn't mark a stream as having a discontinuity after a seek(0).
bool mMustRecaptureAudioPosition;
};
} // namespace mozilla
#endif

Просмотреть файл

@ -4,7 +4,7 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "WMFAudioDecoder.h"
#include "WMFAudioOutputSource.h"
#include "VideoUtils.h"
#include "WMFUtils.h"
#include "nsTArray.h"
@ -18,7 +18,6 @@ PRLogModuleInfo* GetDemuxerLog();
#define LOG(...)
#endif
namespace mozilla {
static void
@ -66,115 +65,87 @@ AACAudioSpecificConfigToUserData(const uint8_t* aAudioSpecConfig,
aOutUserData.AppendElements(aAudioSpecConfig, aConfigLength);
}
WMFAudioDecoder::WMFAudioDecoder(uint32_t aChannelCount,
uint32_t aSampleRate,
uint16_t aBitsPerSample,
const uint8_t* aAudioSpecConfig,
uint32_t aConfigLength)
: mAudioChannels(aChannelCount),
mAudioBytesPerSample(aBitsPerSample / 8),
mAudioRate(aSampleRate),
mLastStreamOffset(0),
mAudioFrameOffset(0),
mAudioFrameSum(0),
mMustRecaptureAudioPosition(true)
WMFAudioOutputSource::WMFAudioOutputSource(const mp4_demuxer::AudioDecoderConfig& aConfig)
: mAudioChannels(ChannelLayoutToChannelCount(aConfig.channel_layout()))
, mAudioBytesPerSample(aConfig.bits_per_channel() / 8)
, mAudioRate(aConfig.samples_per_second())
, mAudioFrameOffset(0)
, mAudioFrameSum(0)
, mMustRecaptureAudioPosition(true)
{
AACAudioSpecificConfigToUserData(aAudioSpecConfig,
aConfigLength,
MOZ_COUNT_CTOR(WMFAudioOutputSource);
AACAudioSpecificConfigToUserData(aConfig.extra_data(),
aConfig.extra_data_size(),
mUserData);
}
nsresult
WMFAudioDecoder::Init()
WMFAudioOutputSource::~WMFAudioOutputSource()
{
mDecoder = new MFTDecoder();
MOZ_COUNT_DTOR(WMFAudioOutputSource);
}
HRESULT hr = mDecoder->Create(CLSID_CMSAACDecMFT);
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
TemporaryRef<MFTDecoder>
WMFAudioOutputSource::Init()
{
RefPtr<MFTDecoder> decoder(new MFTDecoder());
HRESULT hr = decoder->Create(CLSID_CMSAACDecMFT);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
// Setup input/output media types
RefPtr<IMFMediaType> type;
hr = wmf::MFCreateMediaType(byRef(type));
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
hr = type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
hr = type->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_AAC);
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
hr = type->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, mAudioRate);
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
hr = type->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, mAudioChannels);
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
hr = type->SetUINT32(MF_MT_AAC_PAYLOAD_TYPE, 0x1); // ADTS
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
hr = type->SetBlob(MF_MT_USER_DATA,
mUserData.Elements(),
mUserData.Length());
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
hr = mDecoder->SetMediaTypes(type, MFAudioFormat_PCM);
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
hr = decoder->SetMediaTypes(type, MFAudioFormat_PCM);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
return NS_OK;
mDecoder = decoder;
return decoder.forget();
}
nsresult
WMFAudioDecoder::Shutdown()
HRESULT
WMFAudioOutputSource::Output(int64_t aStreamOffset,
nsAutoPtr<MediaData>& aOutData)
{
return NS_OK;
}
DecoderStatus
WMFAudioDecoder::Input(nsAutoPtr<mp4_demuxer::MP4Sample>& aSample)
{
mLastStreamOffset = aSample->byte_offset;
const uint8_t* data = &aSample->data->front();
uint32_t length = aSample->data->size();
HRESULT hr = mDecoder->Input(data, length, aSample->composition_timestamp);
if (hr == MF_E_NOTACCEPTING) {
return DECODE_STATUS_NOT_ACCEPTING;
}
NS_ENSURE_TRUE(SUCCEEDED(hr), DECODE_STATUS_ERROR);
return DECODE_STATUS_OK;
}
DecoderStatus
WMFAudioDecoder::Output(nsAutoPtr<MediaData>& aOutData)
{
DecoderStatus status;
do {
status = OutputNonNegativeTimeSamples(aOutData);
} while (status == DECODE_STATUS_OK && !aOutData);
return status;
}
DecoderStatus
WMFAudioDecoder::OutputNonNegativeTimeSamples(nsAutoPtr<MediaData>& aOutData)
{
aOutData = nullptr;
RefPtr<IMFSample> sample;
HRESULT hr = mDecoder->Output(&sample);
if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
return DECODE_STATUS_NEED_MORE_INPUT;
return MF_E_TRANSFORM_NEED_MORE_INPUT;
}
NS_ENSURE_TRUE(SUCCEEDED(hr), DECODE_STATUS_ERROR);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<IMFMediaBuffer> buffer;
hr = sample->ConvertToContiguousBuffer(byRef(buffer));
NS_ENSURE_TRUE(SUCCEEDED(hr), DECODE_STATUS_ERROR);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
BYTE* data = nullptr; // Note: *data will be owned by the IMFMediaBuffer, we don't need to free it.
DWORD maxLength = 0, currentLength = 0;
hr = buffer->Lock(&data, &maxLength, &currentLength);
NS_ENSURE_TRUE(SUCCEEDED(hr), DECODE_STATUS_ERROR);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
int32_t numSamples = currentLength / mAudioBytesPerSample;
int32_t numFrames = numSamples / mAudioChannels;
@ -203,9 +174,9 @@ WMFAudioDecoder::OutputNonNegativeTimeSamples(nsAutoPtr<MediaData>& aOutData)
mAudioFrameSum = 0;
LONGLONG timestampHns = 0;
hr = sample->GetSampleTime(&timestampHns);
NS_ENSURE_TRUE(SUCCEEDED(hr), DECODE_STATUS_ERROR);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = HNsToFrames(timestampHns, mAudioRate, &mAudioFrameOffset);
NS_ENSURE_TRUE(SUCCEEDED(hr), DECODE_STATUS_ERROR);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
if (mAudioFrameOffset < 0) {
// First sample has a negative timestamp. Strip off the samples until
// we reach positive territory.
@ -223,7 +194,7 @@ WMFAudioDecoder::OutputNonNegativeTimeSamples(nsAutoPtr<MediaData>& aOutData)
if (numFrames == 0) {
// All data from this chunk stripped, loop back and try to output the next
// frame, if possible.
return DECODE_STATUS_OK;
return S_OK;
}
nsAutoArrayPtr<AudioDataValue> audioData(new AudioDataValue[numSamples]);
@ -241,36 +212,27 @@ WMFAudioDecoder::OutputNonNegativeTimeSamples(nsAutoPtr<MediaData>& aOutData)
buffer->Unlock();
int64_t timestamp;
hr = FramesToUsecs(mAudioFrameOffset + mAudioFrameSum, mAudioRate, &timestamp);
NS_ENSURE_TRUE(SUCCEEDED(hr), DECODE_STATUS_ERROR);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
mAudioFrameSum += numFrames;
int64_t duration;
hr = FramesToUsecs(numFrames, mAudioRate, &duration);
NS_ENSURE_TRUE(SUCCEEDED(hr), DECODE_STATUS_ERROR);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
aOutData = new AudioData(mLastStreamOffset,
timestamp,
duration,
numFrames,
audioData.forget(),
mAudioChannels);
aOutData = new AudioData(aStreamOffset,
timestamp,
duration,
numFrames,
audioData.forget(),
mAudioChannels);
#ifdef LOG_SAMPLE_DECODE
LOG("Decoded audio sample! timestamp=%lld duration=%lld currentLength=%u",
timestamp, duration, currentLength);
#endif
return DECODE_STATUS_OK;
}
DecoderStatus
WMFAudioDecoder::Flush()
{
NS_ENSURE_TRUE(mDecoder, DECODE_STATUS_ERROR);
HRESULT hr = mDecoder->Flush();
NS_ENSURE_TRUE(SUCCEEDED(hr), DECODE_STATUS_ERROR);
return DECODE_STATUS_OK;
return S_OK;
}
} // namespace mozilla

Просмотреть файл

@ -0,0 +1,55 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(WMFAudioOutputSource_h_)
#define WMFAudioOutputSource_h_
#include "WMF.h"
#include "MP4Reader.h"
#include "MFTDecoder.h"
#include "mozilla/RefPtr.h"
#include "WMFMediaDataDecoder.h"
namespace mozilla {
class WMFAudioOutputSource : public WMFOutputSource {
public:
WMFAudioOutputSource(const mp4_demuxer::AudioDecoderConfig& aConfig);
~WMFAudioOutputSource();
virtual TemporaryRef<MFTDecoder> Init() MOZ_OVERRIDE;
// Note WMF's AAC decoder sometimes output negatively timestamped samples,
// presumably they're the preroll samples, and we strip them. We may return
// a null aOutput in this case.
virtual HRESULT Output(int64_t aStreamOffset,
nsAutoPtr<MediaData>& aOutput) MOZ_OVERRIDE;
private:
// IMFTransform wrapper that performs the decoding.
RefPtr<MFTDecoder> mDecoder;
const uint32_t mAudioChannels;
const uint32_t mAudioBytesPerSample;
const uint32_t mAudioRate;
nsTArray<BYTE> mUserData;
// The offset, in audio frames, at which playback started since the
// last discontinuity.
int64_t mAudioFrameOffset;
// The number of audio frames that we've played since the last
// discontinuity.
int64_t mAudioFrameSum;
// True if we need to re-initialize mAudioFrameOffset and mAudioFrameSum
// from the next audio packet we decode. This happens after a seek, since
// WMF doesn't mark a stream as having a discontinuity after a seek(0).
bool mMustRecaptureAudioPosition;
};
} // namespace mozilla
#endif // WMFAudioOutputSource_h_

Просмотреть файл

@ -7,11 +7,12 @@
#include "WMF.h"
#include "WMFDecoderModule.h"
#include "WMFDecoder.h"
#include "WMFVideoDecoder.h"
#include "WMFAudioDecoder.h"
#include "WMFVideoOutputSource.h"
#include "WMFAudioOutputSource.h"
#include "mozilla/Preferences.h"
#include "mozilla/DebugOnly.h"
#include "mp4_demuxer/audio_decoder_config.h"
#include "WMFMediaDataDecoder.h"
namespace mozilla {
@ -68,40 +69,25 @@ WMFDecoderModule::Shutdown()
MediaDataDecoder*
WMFDecoderModule::CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
mozilla::layers::LayersBackend aLayersBackend,
mozilla::layers::ImageContainer* aImageContainer)
mozilla::layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback)
{
return new WMFVideoDecoder(aLayersBackend,
aImageContainer,
sDXVAEnabled);
return new WMFMediaDataDecoder(new WMFVideoOutputSource(aLayersBackend,
aImageContainer,
sDXVAEnabled),
aVideoTaskQueue,
aCallback);
}
MediaDataDecoder*
WMFDecoderModule::CreateAACDecoder(const mp4_demuxer::AudioDecoderConfig& aConfig)
WMFDecoderModule::CreateAACDecoder(const mp4_demuxer::AudioDecoderConfig& aConfig,
MediaTaskQueue* aAudioTaskQueue,
MediaDataDecoderCallback* aCallback)
{
return new WMFAudioDecoder(ChannelLayoutToChannelCount(aConfig.channel_layout()),
aConfig.samples_per_second(),
aConfig.bits_per_channel(),
aConfig.extra_data(),
aConfig.extra_data_size());
}
void
WMFDecoderModule::OnDecodeThreadStart()
{
MOZ_ASSERT(!NS_IsMainThread(), "Must not be on main thread.");
// XXX WebAudio can call this on the main thread when using deprecated APIs.
// That should not happen. You cannot change the concurrency model once already set.
// The main thread will continue to be STA, which seems to work, but MSDN
// recommends that MTA be used.
// TODO: enforce that WebAudio stops doing that!
CoInitializeEx(0, COINIT_MULTITHREADED);
}
void
WMFDecoderModule::OnDecodeThreadFinish()
{
MOZ_ASSERT(!NS_IsMainThread(), "Must be on main thread.");
CoUninitialize();
return new WMFMediaDataDecoder(new WMFAudioOutputSource(aConfig),
aAudioTaskQueue,
aCallback);
}
} // namespace mozilla

Просмотреть файл

@ -28,15 +28,15 @@ public:
virtual MediaDataDecoder*
CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
mozilla::layers::LayersBackend aLayersBackend,
mozilla::layers::ImageContainer* aImageContainer) MOZ_OVERRIDE;
mozilla::layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE;
// Decode thread.
virtual MediaDataDecoder* CreateAACDecoder(
const mp4_demuxer::AudioDecoderConfig& aConfig) MOZ_OVERRIDE;
// Platform decoders can override these. Base implementation does nothing.
virtual void OnDecodeThreadStart() MOZ_OVERRIDE;
virtual void OnDecodeThreadFinish() MOZ_OVERRIDE;
const mp4_demuxer::AudioDecoderConfig& aConfig,
MediaTaskQueue* aAudioTaskQueue,
MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE;
static void Init();
private:

Просмотреть файл

@ -0,0 +1,138 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "WMFMediaDataDecoder.h"
#include "VideoUtils.h"
#include "WMFUtils.h"
#include "nsTArray.h"
#include "prlog.h"
#ifdef PR_LOGGING
PRLogModuleInfo* GetDemuxerLog();
#define LOG(...) PR_LOG(GetDemuxerLog(), PR_LOG_DEBUG, (__VA_ARGS__))
#else
#define LOG(...)
#endif
namespace mozilla {
WMFMediaDataDecoder::WMFMediaDataDecoder(WMFOutputSource* aSource,
MediaTaskQueue* aTaskQueue,
MediaDataDecoderCallback* aCallback)
: mTaskQueue(aTaskQueue)
, mCallback(aCallback)
, mSource(aSource)
{
MOZ_COUNT_CTOR(WMFMediaDataDecoder);
}
WMFMediaDataDecoder::~WMFMediaDataDecoder()
{
MOZ_COUNT_DTOR(WMFMediaDataDecoder);
}
nsresult
WMFMediaDataDecoder::Init()
{
mDecoder = mSource->Init();
NS_ENSURE_TRUE(mDecoder, NS_ERROR_FAILURE);
return NS_OK;
}
nsresult
WMFMediaDataDecoder::Shutdown()
{
mDecoder = nullptr;
return NS_OK;
}
// Inserts data into the decoder's pipeline.
nsresult
WMFMediaDataDecoder::Input(mp4_demuxer::MP4Sample* aSample)
{
mTaskQueue->Dispatch(
NS_NewRunnableMethodWithArg<nsAutoPtr<mp4_demuxer::MP4Sample>>(
this,
&WMFMediaDataDecoder::ProcessDecode,
nsAutoPtr<mp4_demuxer::MP4Sample>(aSample)));
return NS_OK;
}
void
WMFMediaDataDecoder::ProcessDecode(mp4_demuxer::MP4Sample* aSample)
{
const uint8_t* data = &aSample->data->front();
uint32_t length = aSample->data->size();
HRESULT hr = mDecoder->Input(data, length, aSample->composition_timestamp);
if (FAILED(hr)) {
NS_WARNING("WMFAudioDecoder failed to input data");
mCallback->Error();
return;
}
mLastStreamOffset = aSample->byte_offset;
ProcessOutput();
}
void
WMFMediaDataDecoder::ProcessOutput()
{
nsAutoPtr<MediaData> output;
HRESULT hr = S_OK;
while (SUCCEEDED(hr = mSource->Output(mLastStreamOffset, output)) &&
output) {
mCallback->Output(output.forget());
}
if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
if (mTaskQueue->IsEmpty()) {
mCallback->InputExhausted();
}
} else if (FAILED(hr)) {
NS_WARNING("WMFMediaDataDecoder failed to output data");
mCallback->Error();
}
}
nsresult
WMFMediaDataDecoder::Flush()
{
// Flush the input task queue. This cancels all pending Decode() calls.
// Note this blocks until the task queue finishes its current job, if
// it's executing at all. Note the MP4Reader ignores all output while
// flushing.
mTaskQueue->Flush();
// Order the MFT to flush; drop all internal data.
NS_ENSURE_TRUE(mDecoder, NS_ERROR_FAILURE);
HRESULT hr = mDecoder->Flush();
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
return NS_OK;
}
void
WMFMediaDataDecoder::ProcessDrain()
{
// Order the decoder to drain...
if (FAILED(mDecoder->SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0))) {
NS_WARNING("Failed to send DRAIN command to audio MFT");
}
// Then extract all available output.
ProcessOutput();
}
nsresult
WMFMediaDataDecoder::Drain()
{
mTaskQueue->Dispatch(NS_NewRunnableMethod(this, &WMFMediaDataDecoder::ProcessDrain));
return NS_OK;
}
} // namespace mozilla

Просмотреть файл

@ -0,0 +1,88 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(WMFMediaDataDecoder_h_)
#define WMFMediaDataDecoder_h_
#include "WMF.h"
#include "MP4Reader.h"
#include "MFTDecoder.h"
#include "mozilla/RefPtr.h"
namespace mozilla {
// Encapsulates the initialization of the MFTDecoder appropriate for decoding
// a given stream, and the process of converting the IMFSample produced
// by the MFT into a MediaData object.
class WMFOutputSource {
public:
virtual ~WMFOutputSource() {}
// Creates an initializs the MFTDecoder.
// Returns nullptr on failure.
virtual TemporaryRef<MFTDecoder> Init() = 0;
// Produces decoded output, if possible. Blocks until output can be produced,
// or until no more is able to be produced.
// Returns S_OK on success, or MF_E_TRANSFORM_NEED_MORE_INPUT if there's not
// enough data to produce more output. If this returns a failure code other
// than MF_E_TRANSFORM_NEED_MORE_INPUT, an error will be reported to the
// MP4Reader.
virtual HRESULT Output(int64_t aStreamOffset,
nsAutoPtr<MediaData>& aOutput) = 0;
};
// Decodes audio and video using Windows Media Foundation. Samples are decoded
// using the MFTDecoder created by the WMFOutputSource. This class implements
// the higher-level logic that drives mapping the MFT to the async
// MediaDataDecoder interface. The specifics of decoding the exact stream
// type are handled by WMFOutputSource and the MFTDecoder it creates.
class WMFMediaDataDecoder : public MediaDataDecoder {
public:
WMFMediaDataDecoder(WMFOutputSource* aOutputSource,
MediaTaskQueue* aAudioTaskQueue,
MediaDataDecoderCallback* aCallback);
~WMFMediaDataDecoder();
virtual nsresult Init() MOZ_OVERRIDE;
virtual nsresult Input(mp4_demuxer::MP4Sample* aSample);
virtual nsresult Flush() MOZ_OVERRIDE;
virtual nsresult Drain() MOZ_OVERRIDE;
virtual nsresult Shutdown() MOZ_OVERRIDE;
private:
// Called on the task queue. Inserts the sample into the decoder, and
// extracts output if available.
void ProcessDecode(mp4_demuxer::MP4Sample* aSample);
// Called on the task queue. Extracts output if available, and delivers
// it to the reader. Called after ProcessDecode() and ProcessDrain().
void ProcessOutput();
// Called on the task queue. Orders the MFT to drain, and then extracts
// all available output.
void ProcessDrain();
RefPtr<MediaTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
RefPtr<MFTDecoder> mDecoder;
nsAutoPtr<WMFOutputSource> mSource;
// The last offset into the media resource that was passed into Input().
// This is used to approximate the decoder's position in the media resource.
int64_t mLastStreamOffset;
};
} // namespace mozilla
#endif // WMFMediaDataDecoder_h_

Просмотреть файл

@ -4,7 +4,7 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "WMFVideoDecoder.h"
#include "WMFVideoOutputSource.h"
#include "MediaDecoderReader.h"
#include "WMFUtils.h"
#include "ImageContainer.h"
@ -28,26 +28,25 @@ using mozilla::layers::LayersBackend;
namespace mozilla {
WMFVideoDecoder::WMFVideoDecoder(mozilla::layers::LayersBackend aLayersBackend,
WMFVideoOutputSource::WMFVideoOutputSource(mozilla::layers::LayersBackend aLayersBackend,
mozilla::layers::ImageContainer* aImageContainer,
bool aDXVAEnabled)
: mVideoStride(0),
mVideoWidth(0),
mVideoHeight(0),
mLastStreamOffset(0),
mImageContainer(aImageContainer),
mDXVAEnabled(aDXVAEnabled),
mLayersBackend(aLayersBackend),
mUseHwAccel(false)
: mVideoStride(0)
, mVideoWidth(0)
, mVideoHeight(0)
, mImageContainer(aImageContainer)
, mDXVAEnabled(aDXVAEnabled)
, mLayersBackend(aLayersBackend)
, mUseHwAccel(false)
{
NS_ASSERTION(!NS_IsMainThread(), "Should not be on main thread.");
MOZ_ASSERT(mImageContainer);
MOZ_COUNT_CTOR(WMFVideoDecoder);
MOZ_COUNT_CTOR(WMFVideoOutputSource);
}
WMFVideoDecoder::~WMFVideoDecoder()
WMFVideoOutputSource::~WMFVideoOutputSource()
{
MOZ_COUNT_DTOR(WMFVideoDecoder);
MOZ_COUNT_DTOR(WMFVideoOutputSource);
}
class CreateDXVAManagerEvent : public nsRunnable {
@ -61,7 +60,7 @@ public:
};
bool
WMFVideoDecoder::InitializeDXVA()
WMFVideoOutputSource::InitializeDXVA()
{
// If we use DXVA but aren't running with a D3D layer manager then the
// readback of decoded video frames from GPU to CPU memory grinds painting
@ -80,18 +79,18 @@ WMFVideoDecoder::InitializeDXVA()
return mDXVA2Manager != nullptr;
}
nsresult
WMFVideoDecoder::Init()
TemporaryRef<MFTDecoder>
WMFVideoOutputSource::Init()
{
bool useDxva = InitializeDXVA();
mDecoder = new MFTDecoder();
RefPtr<MFTDecoder> decoder(new MFTDecoder());
HRESULT hr = mDecoder->Create(CLSID_CMSH264DecoderMFT);
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
HRESULT hr = decoder->Create(CLSID_CMSH264DecoderMFT);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
if (useDxva) {
RefPtr<IMFAttributes> attr(mDecoder->GetAttributes());
RefPtr<IMFAttributes> attr(decoder->GetAttributes());
UINT32 aware = 0;
if (attr) {
@ -103,7 +102,7 @@ WMFVideoDecoder::Init()
//NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
MOZ_ASSERT(mDXVA2Manager);
ULONG_PTR manager = ULONG_PTR(mDXVA2Manager->GetDXVADeviceManager());
hr = mDecoder->SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER, manager);
hr = decoder->SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER, manager);
if (SUCCEEDED(hr)) {
mUseHwAccel = true;
}
@ -113,28 +112,29 @@ WMFVideoDecoder::Init()
// Setup the input/output media types.
RefPtr<IMFMediaType> type;
hr = wmf::MFCreateMediaType(byRef(type));
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
hr = type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
hr = type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
hr = type->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_MixedInterlaceOrProgressive);
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
GUID outputType = mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12;
hr = mDecoder->SetMediaTypes(type, outputType);
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
hr = decoder->SetMediaTypes(type, outputType);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
mDecoder = decoder;
LOG("Video Decoder initialized, Using DXVA: %s", (mUseHwAccel ? "Yes" : "No"));
return NS_OK;
return decoder.forget();
}
HRESULT
WMFVideoDecoder::ConfigureVideoFrameGeometry()
WMFVideoOutputSource::ConfigureVideoFrameGeometry()
{
RefPtr<IMFMediaType> mediaType;
HRESULT hr = mDecoder->GetOutputMediaType(mediaType);
@ -194,30 +194,9 @@ WMFVideoDecoder::ConfigureVideoFrameGeometry()
return S_OK;
}
nsresult
WMFVideoDecoder::Shutdown()
{
return NS_OK;
}
// Inserts data into the decoder's pipeline.
DecoderStatus
WMFVideoDecoder::Input(nsAutoPtr<mp4_demuxer::MP4Sample>& aSample)
{
mLastStreamOffset = aSample->byte_offset;
const uint8_t* data = &aSample->data->front();
uint32_t length = aSample->data->size();
HRESULT hr = mDecoder->Input(data, length, aSample->composition_timestamp);
if (hr == MF_E_NOTACCEPTING) {
return DECODE_STATUS_NOT_ACCEPTING;
}
NS_ENSURE_TRUE(SUCCEEDED(hr), DECODE_STATUS_ERROR);
return DECODE_STATUS_OK;
}
HRESULT
WMFVideoDecoder::CreateBasicVideoFrame(IMFSample* aSample,
WMFVideoOutputSource::CreateBasicVideoFrame(IMFSample* aSample,
int64_t aStreamOffset,
VideoData** aOutVideoData)
{
NS_ENSURE_TRUE(aSample, E_POINTER);
@ -292,7 +271,7 @@ WMFVideoDecoder::CreateBasicVideoFrame(IMFSample* aSample,
Microseconds duration = GetSampleDuration(aSample);
VideoData *v = VideoData::Create(mVideoInfo,
mImageContainer,
mLastStreamOffset,
aStreamOffset,
pts,
duration,
b,
@ -311,7 +290,8 @@ WMFVideoDecoder::CreateBasicVideoFrame(IMFSample* aSample,
}
HRESULT
WMFVideoDecoder::CreateD3DVideoFrame(IMFSample* aSample,
WMFVideoOutputSource::CreateD3DVideoFrame(IMFSample* aSample,
int64_t aStreamOffset,
VideoData** aOutVideoData)
{
NS_ENSURE_TRUE(aSample, E_POINTER);
@ -334,7 +314,7 @@ WMFVideoDecoder::CreateD3DVideoFrame(IMFSample* aSample,
Microseconds duration = GetSampleDuration(aSample);
VideoData *v = VideoData::CreateFromImage(mVideoInfo,
mImageContainer,
mLastStreamOffset,
aStreamOffset,
pts,
duration,
image.forget(),
@ -349,22 +329,20 @@ WMFVideoDecoder::CreateD3DVideoFrame(IMFSample* aSample,
}
// Blocks until decoded sample is produced by the decoder.
DecoderStatus
WMFVideoDecoder::Output(nsAutoPtr<MediaData>& aOutData)
HRESULT
WMFVideoOutputSource::Output(int64_t aStreamOffset,
nsAutoPtr<MediaData>& aOutData)
{
RefPtr<IMFSample> sample;
HRESULT hr;
aOutData = nullptr;
// Loop until we decode a sample, or an unexpected error that we can't
// handle occurs.
while (true) {
hr = mDecoder->Output(&sample);
if (SUCCEEDED(hr)) {
NS_ENSURE_TRUE(sample, DECODE_STATUS_ERROR);
break;
}
if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
return DECODE_STATUS_NEED_MORE_INPUT;
return MF_E_TRANSFORM_NEED_MORE_INPUT;
}
if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
// Video stream output type change. Probably a geometric aperture
@ -372,37 +350,32 @@ WMFVideoDecoder::Output(nsAutoPtr<MediaData>& aOutData)
// correct size frames.
MOZ_ASSERT(!sample);
hr = ConfigureVideoFrameGeometry();
NS_ENSURE_TRUE(SUCCEEDED(hr), DECODE_STATUS_ERROR);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
// Loop back and try decoding again...
continue;
}
if (SUCCEEDED(hr)) {
break;
}
// Else unexpected error, assert, and bail.
NS_WARNING("WMFVideoDecoder::Output() unexpected error");
return DECODE_STATUS_ERROR;
NS_WARNING("WMFVideoOutputSource::Output() unexpected error");
return E_FAIL;
}
VideoData* frame = nullptr;
if (mUseHwAccel) {
hr = CreateD3DVideoFrame(sample, &frame);
hr = CreateD3DVideoFrame(sample, aStreamOffset, &frame);
} else {
hr = CreateBasicVideoFrame(sample, &frame);
hr = CreateBasicVideoFrame(sample, aStreamOffset, &frame);
}
// Frame should be non null only when we succeeded.
MOZ_ASSERT((frame != nullptr) == SUCCEEDED(hr));
NS_ENSURE_TRUE(SUCCEEDED(hr) && frame, DECODE_STATUS_ERROR);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
NS_ENSURE_TRUE(frame, E_FAIL);
aOutData = frame;
return DECODE_STATUS_OK;
}
DecoderStatus
WMFVideoDecoder::Flush()
{
NS_ENSURE_TRUE(mDecoder, DECODE_STATUS_ERROR);
HRESULT hr = mDecoder->Flush();
NS_ENSURE_TRUE(SUCCEEDED(hr), DECODE_STATUS_ERROR);
return DECODE_STATUS_OK;
return S_OK;
}
} // namespace mozilla

Просмотреть файл

@ -4,39 +4,31 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(WMFVideoDecoder_h_)
#define WMFVideoDecoder_h_
#if !defined(WMFVideoOutputSource_h_)
#define WMFVideoOutputSource_h_
#include "WMF.h"
#include "MP4Reader.h"
#include "MFTDecoder.h"
#include "nsRect.h"
#include "WMFMediaDataDecoder.h"
#include "mozilla/RefPtr.h"
namespace mozilla {
class DXVA2Manager;
class WMFVideoDecoder : public MediaDataDecoder {
class WMFVideoOutputSource : public WMFOutputSource {
public:
WMFVideoDecoder(mozilla::layers::LayersBackend aLayersBackend,
mozilla::layers::ImageContainer* aImageContainer,
bool aDXVAEnabled);
~WMFVideoDecoder();
WMFVideoOutputSource(mozilla::layers::LayersBackend aLayersBackend,
mozilla::layers::ImageContainer* aImageContainer,
bool aDXVAEnabled);
~WMFVideoOutputSource();
// Decode thread.
virtual nsresult Init() MOZ_OVERRIDE;
virtual TemporaryRef<MFTDecoder> Init() MOZ_OVERRIDE;
virtual nsresult Shutdown() MOZ_OVERRIDE;
// Inserts data into the decoder's pipeline.
virtual DecoderStatus Input(nsAutoPtr<mp4_demuxer::MP4Sample>& aSample) MOZ_OVERRIDE;
// Blocks until a decoded sample is produced by the decoder.
virtual DecoderStatus Output(nsAutoPtr<MediaData>& aOutData) MOZ_OVERRIDE;
virtual DecoderStatus Flush() MOZ_OVERRIDE;
virtual HRESULT Output(int64_t aStreamOffset,
nsAutoPtr<MediaData>& aOutput) MOZ_OVERRIDE;
private:
@ -45,10 +37,13 @@ private:
HRESULT ConfigureVideoFrameGeometry();
HRESULT CreateBasicVideoFrame(IMFSample* aSample,
int64_t aStreamOffset,
VideoData** aOutVideoData);
HRESULT CreateD3DVideoFrame(IMFSample* aSample,
int64_t aStreamOffset,
VideoData** aOutVideoData);
// Video frame geometry.
VideoInfo mVideoInfo;
uint32_t mVideoStride;
@ -56,21 +51,17 @@ private:
uint32_t mVideoHeight;
nsIntRect mPictureRegion;
// The last offset into the media resource that was passed into Input().
// This is used to approximate the decoder's position in the media resource.
int64_t mLastStreamOffset;
nsAutoPtr<MFTDecoder> mDecoder;
RefPtr<MFTDecoder> mDecoder;
RefPtr<layers::ImageContainer> mImageContainer;
nsAutoPtr<DXVA2Manager> mDXVA2Manager;
RefPtr<MediaTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
const bool mDXVAEnabled;
const layers::LayersBackend mLayersBackend;
bool mUseHwAccel;
};
} // namespace mozilla
#endif
#endif // WMFVideoOutputSource_h_

Просмотреть файл

@ -532,20 +532,20 @@ bool GStreamerReader::DecodeAudioData()
timestamp = gst_segment_to_stream_time(&mAudioSegment,
GST_FORMAT_TIME, timestamp);
timestamp = GST_TIME_AS_USECONDS(timestamp);
int64_t duration = 0;
if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
int64_t offset = GST_BUFFER_OFFSET(buffer);
unsigned int size = GST_BUFFER_SIZE(buffer);
int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudio.mChannels;
ssize_t outSize = static_cast<size_t>(size / sizeof(AudioDataValue));
nsAutoArrayPtr<AudioDataValue> data(new AudioDataValue[outSize]);
memcpy(data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
AudioData* audio = new AudioData(offset, timestamp, duration,
frames, data.forget(), mInfo.mAudio.mChannels);
mAudioQueue.Push(audio);
typedef AudioCompactor::NativeCopy GstCopy;
mAudioCompactor.Push(offset,
timestamp,
mInfo.mAudio.mRate,
frames,
mInfo.mAudio.mChannels,
GstCopy(GST_BUFFER_DATA(buffer),
size,
mInfo.mAudio.mChannels));
gst_buffer_unref(buffer);
return true;

Просмотреть файл

@ -0,0 +1,131 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "gtest/gtest.h"
#include "AudioCompactor.h"
#include "MediaDecoderReader.h"
using mozilla::AudioCompactor;
using mozilla::AudioData;
using mozilla::AudioDataValue;
using mozilla::MediaDecoderReader;
using mozilla::MediaQueue;
// Functor handed to AudioCompactor::Push() by the tests. It records how
// many times it is invoked and how many frames it claims to have copied,
// without writing any actual audio samples into the buffer.
class TestCopy
{
public:
  TestCopy(uint32_t aFrames, uint32_t aChannels,
           uint32_t &aCallCount, uint32_t &aFrameCount)
    : mFrames(aFrames)
    , mChannels(aChannels)
    , mCallCount(aCallCount)
    , mFrameCount(aFrameCount)
  { }

  uint32_t operator()(AudioDataValue *aBuffer, uint32_t aSamples)
  {
    ++mCallCount;
    // Claim as many frames as fit in the offered buffer, but never more
    // than remain from the configured total.
    uint32_t chunk = std::min(mFrames - mFrameCount, aSamples / mChannels);
    mFrameCount += chunk;
    return chunk;
  }

private:
  const uint32_t mFrames;      // total frames the functor should report
  const uint32_t mChannels;    // channels per frame
  uint32_t &mCallCount;        // incremented on every invocation
  uint32_t &mFrameCount;       // running total of frames reported
};
// Push aBytes worth of audio through an AudioCompactor and verify that the
// copy functor actually ran, that every frame was copied, and that the
// resulting AudioData allocations waste no more than the allowed slop.
static void TestAudioCompactor(size_t aBytes)
{
  MediaQueue<AudioData> queue;
  AudioCompactor compactor(queue);

  const uint64_t offset = 0;
  const uint64_t time = 0;
  const uint32_t rate = 44000;
  const uint32_t channelCount = 2;
  const uint32_t frameTotal = aBytes / (channelCount * sizeof(AudioDataValue));
  const size_t slopLimit = aBytes / AudioCompactor::MAX_SLOP_DIVISOR;

  uint32_t copyCalls = 0;
  uint32_t copiedFrames = 0;
  compactor.Push(offset, time, rate, frameTotal, channelCount,
                 TestCopy(frameTotal, channelCount, copyCalls, copiedFrames));

  EXPECT_GT(copyCalls, 0U) << "copy functor never called";
  EXPECT_EQ(frameTotal, copiedFrames) << "incorrect number of frames copied";

  // Measure the memory used by the queued AudioData objects and strip the
  // fixed per-object overhead to obtain the size of the sample buffers.
  MediaDecoderReader::AudioQueueMemoryFunctor memoryFunc;
  queue.LockedForEach(memoryFunc);
  size_t bufferBytes = memoryFunc.mSize - (copyCalls * sizeof(AudioData));
  size_t slop = bufferBytes - aBytes;
  EXPECT_LE(slop, slopLimit) << "allowed too much allocation slop";
}
// Slop check when pushing 4000 bytes of audio.
TEST(Media, AudioCompactor_4000)
{
  TestAudioCompactor(4000);
}
// Slop check when pushing 4096 bytes of audio.
TEST(Media, AudioCompactor_4096)
{
  TestAudioCompactor(4096);
}
// Slop check when pushing 5000 bytes of audio.
TEST(Media, AudioCompactor_5000)
{
  TestAudioCompactor(5000);
}
// Slop check when pushing 5256 bytes of audio.
TEST(Media, AudioCompactor_5256)
{
  TestAudioCompactor(5256);
}
// Exercise AudioCompactor::NativeCopy directly: copy a small source buffer
// into a larger destination in several slices and verify that exactly the
// source bytes — and nothing else — were transferred.
TEST(Media, AudioCompactor_NativeCopy)
{
  const uint32_t channels = 2;
  const size_t srcBytes = 32;
  const uint32_t srcSamples = srcBytes / sizeof(AudioDataValue);
  const uint32_t srcFrames = srcSamples / channels;

  // Fill the source with a recognizable byte pattern: 0, 1, 2, ...
  uint8_t src[srcBytes];
  for (uint32_t i = 0; i < srcBytes; ++i) {
    src[i] = i;
  }

  AudioCompactor::NativeCopy copy(src, srcBytes, channels);

  // The destination is twice as large as needed and pre-filled with a
  // sentinel value so untouched samples can be detected afterwards.
  const uint32_t dstSamples = srcSamples * 2;
  AudioDataValue dst[dstSamples];
  const AudioDataValue notCopied = 0xffff;
  for (uint32_t i = 0; i < dstSamples; ++i) {
    dst[i] = notCopied;
  }

  // Drain the copier in several equal slices.
  const uint32_t sliceCount = 8;
  uint32_t framesOut = 0;
  uint32_t cursor = 0;
  for (uint32_t i = 0; i < sliceCount; ++i) {
    const uint32_t sliceSamples = dstSamples / sliceCount;
    framesOut += copy(dst + cursor, sliceSamples);
    cursor += sliceSamples;
  }
  EXPECT_EQ(srcFrames, framesOut) << "copy exact number of source frames";

  // Only the first srcSamples entries may differ from the sentinel.
  for (uint32_t i = 0; i < dstSamples; ++i) {
    if (i < srcSamples) {
      EXPECT_NE(notCopied, dst[i]) << "should have copied over these bytes";
    } else {
      EXPECT_EQ(notCopied, dst[i]) << "should not have copied over these bytes";
    }
  }
}

Просмотреть файл

@ -7,6 +7,7 @@
LIBRARY_NAME = 'media_gtest'
UNIFIED_SOURCES += [
'TestAudioCompactor.cpp',
'TestTrackEncoder.cpp',
]

Просмотреть файл

@ -58,6 +58,7 @@ EXPORTS += [
'AbstractMediaDecoder.h',
'AudioAvailableEventManager.h',
'AudioChannelFormat.h',
'AudioCompactor.h',
'AudioEventTimeline.h',
'AudioNodeEngine.h',
'AudioNodeExternalInputStream.h',
@ -85,6 +86,7 @@ EXPORTS += [
'MediaResource.h',
'MediaSegment.h',
'MediaStreamGraph.h',
'MediaTaskQueue.h',
'MP3FrameParser.h',
'RtspMediaResource.h',
'SharedBuffer.h',
@ -114,6 +116,7 @@ EXPORTS.mozilla.dom += [
UNIFIED_SOURCES += [
'AudioAvailableEventManager.cpp',
'AudioChannelFormat.cpp',
'AudioCompactor.cpp',
'AudioNodeEngine.cpp',
'AudioNodeExternalInputStream.cpp',
'AudioNodeStream.cpp',
@ -134,6 +137,7 @@ UNIFIED_SOURCES += [
'MediaShutdownManager.cpp',
'MediaStreamGraph.cpp',
'MediaStreamTrack.cpp',
'MediaTaskQueue.cpp',
'MP3FrameParser.cpp',
'RtspMediaResource.cpp',
'SharedThreadPool.cpp',

Просмотреть файл

@ -312,33 +312,29 @@ bool MediaOmxReader::DecodeAudioData()
int64_t pos = mDecoder->GetResource()->Tell();
// Read next frame
MPAPI::AudioFrame frame;
if (!mOmxDecoder->ReadAudio(&frame, mAudioSeekTimeUs)) {
MPAPI::AudioFrame source;
if (!mOmxDecoder->ReadAudio(&source, mAudioSeekTimeUs)) {
return false;
}
mAudioSeekTimeUs = -1;
// Ignore empty buffer which stagefright media read will sporadically return
if (frame.mSize == 0) {
if (source.mSize == 0) {
return true;
}
nsAutoArrayPtr<AudioDataValue> buffer(new AudioDataValue[frame.mSize/2] );
memcpy(buffer.get(), frame.mData, frame.mSize);
uint32_t frames = source.mSize / (source.mAudioChannels *
sizeof(AudioDataValue));
uint32_t frames = frame.mSize / (2 * frame.mAudioChannels);
CheckedInt64 duration = FramesToUsecs(frames, frame.mAudioSampleRate);
if (!duration.isValid()) {
return false;
}
mAudioQueue.Push(new AudioData(pos,
frame.mTimeUs,
duration.value(),
frames,
buffer.forget(),
frame.mAudioChannels));
return true;
typedef AudioCompactor::NativeCopy OmxCopy;
return mAudioCompactor.Push(pos,
source.mTimeUs,
source.mAudioSampleRate,
frames,
source.mAudioChannels,
OmxCopy(static_cast<uint8_t *>(source.mData),
source.mSize,
source.mAudioChannels));
}
nsresult MediaOmxReader::Seek(int64_t aTarget, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime)

Двоичные данные
content/media/test/detodos.webm

Двоичный файл не отображается.

Двоичные данные
content/media/test/invalid-preskip.webm Normal file

Двоичный файл не отображается.

Просмотреть файл

@ -233,6 +233,7 @@ var gInvalidTests = [
{ name:"invalid-cmap-s0c0.opus", type:"audio/ogg; codecs=opus"},
{ name:"invalid-cmap-s0c2.opus", type:"audio/ogg; codecs=opus"},
{ name:"invalid-cmap-s1c2.opus", type:"audio/ogg; codecs=opus"},
{ name:"invalid-preskip.webm", type:"audio/webm; codecs=opus"},
];
// Converts a path/filename to a file:// URI which we can load from disk.

Просмотреть файл

@ -110,6 +110,7 @@ support-files =
invalid-m1c9.opus
invalid-m2c0.opus
invalid-m2c1.opus
invalid-preskip.webm
long.vtt
manifest.js
multiple-bos.ogg

Просмотреть файл

@ -185,32 +185,33 @@ public:
void CopyFromInputBufferWithResampling(AudioNodeStream* aStream,
AudioChunk* aOutput,
uint32_t aChannels,
uintptr_t aSourceOffset,
uintptr_t aBufferOffset,
uint32_t aAvailableInInputBuffer,
uint32_t& aFramesWritten) {
uint32_t aOffsetWithinBlock,
uint32_t& aFramesWritten,
uint32_t aBufferOffset,
uint32_t aBufferMax) {
// TODO: adjust for mStop (see bug 913854 comment 9).
uint32_t availableInOutputBuffer = WEBAUDIO_BLOCK_SIZE - aBufferOffset;
uint32_t availableInOutputBuffer = WEBAUDIO_BLOCK_SIZE - aOffsetWithinBlock;
SpeexResamplerState* resampler = Resampler(aStream, aChannels);
MOZ_ASSERT(aChannels > 0);
if (aAvailableInInputBuffer) {
if (aBufferOffset < aBufferMax) {
uint32_t availableInInputBuffer = aBufferMax - aBufferOffset;
// Limit the number of input samples copied and possibly
// format-converted for resampling by estimating how many will be used.
// This may be a little small when filling the resampler with initial
// data, but we'll get called again and it will work out.
uint32_t num, den;
speex_resampler_get_ratio(resampler, &num, &den);
uint32_t inputLimit = std::min(aAvailableInInputBuffer,
uint32_t inputLimit = std::min(availableInInputBuffer,
availableInOutputBuffer * den / num + 10);
for (uint32_t i = 0; true; ) {
uint32_t inSamples = inputLimit;
const float* inputData = mBuffer->GetData(i) + aSourceOffset;
const float* inputData = mBuffer->GetData(i) + aBufferOffset;
uint32_t outSamples = availableInOutputBuffer;
float* outputData =
static_cast<float*>(const_cast<void*>(aOutput->mChannelData[i])) +
aBufferOffset;
aOffsetWithinBlock;
WebAudioUtils::SpeexResamplerProcess(resampler, i,
inputData, &inSamples,
@ -219,7 +220,7 @@ public:
mPosition += inSamples;
MOZ_ASSERT(mPosition <= mDuration || mLoop);
aFramesWritten = outSamples;
if (inSamples == aAvailableInInputBuffer && !mLoop) {
if (inSamples == availableInInputBuffer && !mLoop) {
// If the available output space were unbounded then the input
// latency would always be the correct amount of extra input to
// provide in order to advance the output position to align with
@ -242,7 +243,7 @@ public:
uint32_t outSamples = availableInOutputBuffer;
float* outputData =
static_cast<float*>(const_cast<void*>(aOutput->mChannelData[i])) +
aBufferOffset;
aOffsetWithinBlock;
// AudioDataValue* for aIn selects the function that does not try to
// copy and format-convert input data.
@ -312,6 +313,7 @@ public:
aBufferMax - aBufferOffset),
mStop - *aCurrentPosition);
if (numFrames == WEBAUDIO_BLOCK_SIZE && !ShouldResample(aStream->SampleRate())) {
MOZ_ASSERT(aBufferOffset < aBufferMax);
BorrowFromInputBuffer(aOutput, aChannels, aBufferOffset);
*aOffsetWithinBlock += numFrames;
*aCurrentPosition += numFrames;
@ -321,16 +323,14 @@ public:
AllocateAudioBlock(aChannels, aOutput);
}
if (!ShouldResample(aStream->SampleRate())) {
MOZ_ASSERT(aBufferOffset < aBufferMax);
CopyFromInputBuffer(aOutput, aChannels, aBufferOffset, *aOffsetWithinBlock, numFrames);
*aOffsetWithinBlock += numFrames;
*aCurrentPosition += numFrames;
mPosition += numFrames;
} else {
uint32_t framesWritten, availableInInputBuffer;
availableInInputBuffer = aBufferMax - aBufferOffset;
CopyFromInputBufferWithResampling(aStream, aOutput, aChannels, aBufferOffset, *aOffsetWithinBlock, availableInInputBuffer, framesWritten);
uint32_t framesWritten;
CopyFromInputBufferWithResampling(aStream, aOutput, aChannels, *aOffsetWithinBlock, framesWritten, aBufferOffset, aBufferMax);
*aOffsetWithinBlock += framesWritten;
*aCurrentPosition += framesWritten;
}

Просмотреть файл

@ -372,7 +372,7 @@ nsresult WebMReader::ReadMetadata(MediaInfo* aInfo,
mHasAudio = true;
mInfo.mAudio.mHasAudio = true;
mAudioCodec = nestegg_track_codec_id(mContext, track);
mCodecDelay = params.codec_delay;
mCodecDelay = params.codec_delay / NS_PER_USEC;
if (mAudioCodec == NESTEGG_CODEC_VORBIS) {
// Get the Vorbis header data
@ -439,6 +439,13 @@ nsresult WebMReader::ReadMetadata(MediaInfo* aInfo,
return NS_ERROR_FAILURE;
}
if (static_cast<int64_t>(mCodecDelay) != FramesToUsecs(mOpusParser->mPreSkip, mOpusParser->mRate).value()) {
LOG(PR_LOG_WARNING,
("Invalid Opus header: CodecDelay and pre-skip do not match!\n"));
Cleanup();
return NS_ERROR_FAILURE;
}
mInfo.mAudio.mRate = mOpusParser->mRate;
mInfo.mAudio.mChannels = mOpusParser->mChannels;
@ -718,7 +725,7 @@ bool WebMReader::DecodeAudioPacket(nestegg_packet* aPacket, int64_t aOffset)
NS_WARNING("Int overflow converting WebM audio duration");
return false;
}
CheckedInt64 time = startTime - (mCodecDelay / NS_PER_USEC);
CheckedInt64 time = startTime - mCodecDelay;
if (!time.isValid()) {
NS_WARNING("Int overflow shifting tstamp by codec delay");
nestegg_free_packet(aPacket);

Просмотреть файл

@ -215,7 +215,7 @@ private:
// Number of audio frames we've decoded since decoding began at mAudioStartMs.
uint64_t mAudioFrames;
// Number of nanoseconds that must be discarded from the start of the Stream.
// Number of microseconds that must be discarded from the start of the Stream.
uint64_t mCodecDelay;
// Parser state and computed offset-time mappings. Shared by multiple

Просмотреть файл

@ -4776,8 +4776,10 @@ nsDocShell::LoadErrorPage(nsIURI *aURI, const char16_t *aURL,
}
errorPageUrl.AppendLiteral("&c=");
errorPageUrl.AppendASCII(escapedCharset.get());
errorPageUrl.AppendLiteral("&d=");
errorPageUrl.AppendASCII(escapedDescription.get());
nsAutoCString frameType(FrameTypeToString(mFrameType));
errorPageUrl.AppendLiteral("&f=");
errorPageUrl.AppendASCII(frameType.get());
// Append the manifest URL if the error comes from an app.
nsString manifestURL;
@ -4791,9 +4793,10 @@ nsDocShell::LoadErrorPage(nsIURI *aURI, const char16_t *aURL,
errorPageUrl.AppendASCII(manifestParam.get());
}
nsAutoCString frameType(FrameTypeToString(mFrameType));
errorPageUrl.AppendLiteral("&f=");
errorPageUrl.AppendASCII(frameType.get());
// netError.xhtml's getDescription only handles the "d" parameter at the
// end of the URL, so append it last.
errorPageUrl.AppendLiteral("&d=");
errorPageUrl.AppendASCII(escapedDescription.get());
nsCOMPtr<nsIURI> errorPageURI;
rv = NS_NewURI(getter_AddRefs(errorPageURI), errorPageUrl);

Просмотреть файл

@ -1341,7 +1341,6 @@ DrawTargetCG::Init(BackendType aType,
// XXX: Create input parameter to control interpolation and
// use the default for content.
CGContextSetInterpolationQuality(mCg, kCGInterpolationLow);
CGContextSetShouldSmoothFonts(mCg, GetPermitSubpixelAA());
if (aType == BackendType::COREGRAPHICS_ACCELERATED) {
@ -1381,7 +1380,6 @@ DrawTargetCG::Init(CGContextRef cgContext, const IntSize &aSize)
mSize = aSize;
mCg = cgContext;
CGContextSetShouldSmoothFonts(mCg, GetPermitSubpixelAA());
CGContextRetain(mCg);
assert(mCg);
@ -1535,12 +1533,6 @@ DrawTargetCG::MarkChanged()
}
}
void
DrawTargetCG::SetPermitSubpixelAA(bool aPermitSubpixelAA) {
DrawTarget::SetPermitSubpixelAA(aPermitSubpixelAA);
CGContextSetShouldSmoothFonts(mCg, aPermitSubpixelAA);
}
CGContextRef
BorrowedCGContext::BorrowCGContextFromDrawTarget(DrawTarget *aDT)
{

Просмотреть файл

@ -154,7 +154,6 @@ public:
virtual IntSize GetSize() { return mSize; }
virtual void SetPermitSubpixelAA(bool aPermitSubpixelAA) MOZ_OVERRIDE;
/* This is for creating good compatible surfaces */
virtual TemporaryRef<SourceSurface> CreateSourceSurfaceFromData(unsigned char *aData,

Просмотреть файл

@ -235,7 +235,9 @@ CloneAligned(DataSourceSurface* aSource)
{
RefPtr<DataSourceSurface> copy =
Factory::CreateDataSourceSurface(aSource->GetSize(), aSource->GetFormat());
CopyRect(aSource, copy, IntRect(IntPoint(), aSource->GetSize()), IntPoint());
if (copy) {
CopyRect(aSource, copy, IntRect(IntPoint(), aSource->GetSize()), IntPoint());
}
return copy;
}
@ -730,6 +732,13 @@ FilterNodeSoftware::GetInputDataSourceSurface(uint32_t aInputEnumIndex,
RefPtr<DataSourceSurface> result =
GetDataSurfaceInRect(surface, surfaceRect, aRect, aEdgeMode);
if (result &&
(result->Stride() != GetAlignedStride<16>(result->Stride()) ||
reinterpret_cast<uintptr_t>(result->GetData()) % 16 != 0)) {
// Align unaligned surface.
result = CloneAligned(result);
}
if (!result) {
#ifdef DEBUG_DUMP_SURFACES
printf(" -- no input --</section>\n\n");
@ -737,12 +746,6 @@ FilterNodeSoftware::GetInputDataSourceSurface(uint32_t aInputEnumIndex,
return nullptr;
}
if (result->Stride() != GetAlignedStride<16>(result->Stride()) ||
reinterpret_cast<uintptr_t>(result->GetData()) % 16 != 0) {
// Align unaligned surface.
result = CloneAligned(result);
}
SurfaceFormat currentFormat = result->GetFormat();
if (DesiredFormat(currentFormat, aFormatHint) == SurfaceFormat::B8G8R8A8 &&
currentFormat != SurfaceFormat::B8G8R8A8) {

Просмотреть файл

@ -133,8 +133,8 @@ ClientThebesLayer::PaintBuffer(gfxContext* aContext,
ClientManager()->SetTransactionIncomplete();
return;
}
ClientManager()->GetThebesLayerCallback()(this,
aContext,
ClientManager()->GetThebesLayerCallback()(this,
aContext,
aExtendedRegionToDraw,
aClip,
aRegionToInvalidate,

Просмотреть файл

@ -107,7 +107,7 @@ protected:
const nsIntRegion& aRegionToInvalidate,
bool aDidSelfCopy,
DrawRegionClip aClip);
void PaintThebes();
void DestroyBackBuffer()

Просмотреть файл

@ -102,26 +102,16 @@ CompositableClient::CreateDeprecatedTextureClient(DeprecatedTextureClientType aD
switch (aDeprecatedTextureClientType) {
case TEXTURE_SHARED_GL:
if (parentBackend == LayersBackend::LAYERS_OPENGL) {
result = new DeprecatedTextureClientSharedOGL(GetForwarder(), GetTextureInfo());
}
break;
case TEXTURE_SHARED_GL_EXTERNAL:
if (parentBackend == LayersBackend::LAYERS_OPENGL) {
result = new DeprecatedTextureClientSharedOGLExternal(GetForwarder(), GetTextureInfo());
}
break;
case TEXTURE_STREAM_GL:
if (parentBackend == LayersBackend::LAYERS_OPENGL) {
result = new DeprecatedTextureClientStreamOGL(GetForwarder(), GetTextureInfo());
}
break;
MOZ_CRASH("Unsupported. this should not be reached");
case TEXTURE_YCBCR:
if (parentBackend == LayersBackend::LAYERS_OPENGL ||
parentBackend == LayersBackend::LAYERS_D3D9 ||
if (parentBackend == LayersBackend::LAYERS_D3D9 ||
parentBackend == LayersBackend::LAYERS_D3D11 ||
parentBackend == LayersBackend::LAYERS_BASIC) {
result = new DeprecatedTextureClientShmemYCbCr(GetForwarder(), GetTextureInfo());
} else {
MOZ_CRASH("Unsupported. this should not be reached");
}
break;
case TEXTURE_CONTENT:

Просмотреть файл

@ -630,18 +630,22 @@ ContentClientDoubleBuffered::FinalizeFrame(const nsIntRegion& aRegionToDraw)
mFrontClient->Unlock();
return;
}
RefPtr<DrawTarget> dt =
mFrontClient->AsTextureClientDrawTarget()->GetAsDrawTarget();
RefPtr<DrawTarget> dtOnWhite = mFrontClientOnWhite
? mFrontClientOnWhite->AsTextureClientDrawTarget()->GetAsDrawTarget()
: nullptr;
RotatedBuffer frontBuffer(dt,
dtOnWhite,
mFrontBufferRect,
mFrontBufferRotation);
UpdateDestinationFrom(frontBuffer, updateRegion);
// We need to flush our buffers before we unlock our front textures
FlushBuffers();
{
// Restrict the DrawTargets and frontBuffer to a scope to make
// sure there is no more external references to the DrawTargets
// when we Unlock the TextureClients.
RefPtr<DrawTarget> dt =
mFrontClient->AsTextureClientDrawTarget()->GetAsDrawTarget();
RefPtr<DrawTarget> dtOnWhite = mFrontClientOnWhite
? mFrontClientOnWhite->AsTextureClientDrawTarget()->GetAsDrawTarget()
: nullptr;
RotatedBuffer frontBuffer(dt,
dtOnWhite,
mFrontBufferRect,
mFrontBufferRotation);
UpdateDestinationFrom(frontBuffer, updateRegion);
}
mFrontClient->Unlock();
if (mFrontClientOnWhite) {
mFrontClientOnWhite->Unlock();
@ -667,6 +671,8 @@ ContentClientDoubleBuffered::UpdateDestinationFrom(const RotatedBuffer& aSource,
if (isClippingCheap) {
destDT->PopClip();
}
// Flush the destination before the sources become inaccessible (Unlock).
destDT->Flush();
ReturnDrawTargetToBuffer(destDT);
if (aSource.HaveBufferOnWhite()) {
@ -686,6 +692,8 @@ ContentClientDoubleBuffered::UpdateDestinationFrom(const RotatedBuffer& aSource,
if (isClippingCheap) {
destDT->PopClip();
}
// Flush the destination before the sources become inaccessible (Unlock).
destDT->Flush();
ReturnDrawTargetToBuffer(destDT);
}
}

Просмотреть файл

@ -81,6 +81,31 @@ public:
class TextureClientDrawTarget
{
public:
/**
* Returns a DrawTarget to draw into the TextureClient.
*
* This must never be called on a TextureClient that is not successfully locked.
* When called several times within one Lock/Unlock pair, this method should
* return the same DrawTarget.
* The DrawTarget is automatically flushed by the TextureClient when the latter
* is unlocked, and the DrawTarget that will be returned within the next
* lock/unlock pair may or may not be the same object.
* Do not keep references to the DrawTarget outside of the lock/unlock pair.
*
* This is typically used as follows:
*
* if (!texture->Lock(OPEN_READ_WRITE)) {
* return false;
* }
* {
* // Restrict this code's scope to ensure all references to dt are gone
* // when Unlock is called.
* RefPtr<DrawTarget> dt = texture->AsTextureClientDrawTarget()->GetAsDrawTarget();
* // use the draw target ...
* }
* texture->Unlock();
*
*/
virtual TemporaryRef<gfx::DrawTarget> GetAsDrawTarget() = 0;
virtual gfx::SurfaceFormat GetFormat() const = 0;
/**
@ -100,7 +125,20 @@ public:
class TextureClientYCbCr
{
public:
/**
* Copy aData into this texture client.
*
* This must never be called on a TextureClient that is not successfully locked.
*/
virtual bool UpdateYCbCr(const PlanarYCbCrData& aData) = 0;
/**
* Allocates for a given surface size, taking into account the pixel format
* which is part of the state of the TextureClient.
*
* Does not clear the surface, since we expect the surface to
* be painted entirely with opaque content.
*/
virtual bool AllocateForYCbCr(gfx::IntSize aYSize,
gfx::IntSize aCbCrSize,
StereoMode aStereoMode) = 0;
@ -198,16 +236,6 @@ public:
virtual gfx::IntSize GetSize() const = 0;
/**
* Drop the shared data into a TextureClientData object and mark this
* TextureClient as invalid.
*
* The TextureClient must not hold any reference to the shared data
* after this method has been called.
* The TextureClientData is owned by the caller.
*/
virtual TextureClientData* DropTextureData() = 0;
/**
* TextureFlags contain important information about various aspects
* of the texture, like how its lifetime is managed, and how it
@ -235,12 +263,6 @@ public:
*/
bool IsValid() const { return mValid; }
/**
* An invalid TextureClient cannot provide access to its shared data
* anymore. This usually means it will soon be destroyed.
*/
void MarkInvalid() { mValid = false; }
/**
* Create and init the TextureChild/Parent IPDL actor pair.
*
@ -257,7 +279,11 @@ public:
PTextureChild* GetIPDLActor();
/**
* TODO[nical] doc!
* Triggers the destruction of the shared data and the corresponding TextureHost.
*
* If the texture flags contain TEXTURE_DEALLOCATE_CLIENT, the destruction
* will be synchronously coordinated with the compositor side, otherwise it
* will be done asynchronously.
*/
void ForceRemove();
@ -273,6 +299,22 @@ private:
friend class AtomicRefCountedWithFinalize<TextureClient>;
protected:
/**
* An invalid TextureClient cannot provide access to its shared data
* anymore. This usually means it will soon be destroyed.
*/
void MarkInvalid() { mValid = false; }
/**
* Drop the shared data into a TextureClientData object and mark this
* TextureClient as invalid.
*
* The TextureClient must not hold any reference to the shared data
* after this method has been called.
* The TextureClientData is owned by the caller.
*/
virtual TextureClientData* DropTextureData() = 0;
void AddFlags(TextureFlags aFlags)
{
MOZ_ASSERT(!IsSharedWithCompositor());

Просмотреть файл

@ -205,9 +205,9 @@ TiledContentHost::RenderTile(const TiledTexture& aTile,
textureRect.width / aTextureBounds.width,
textureRect.height / aTextureBounds.height);
mCompositor->DrawQuad(graphicsRect, aClipRect, aEffectChain, aOpacity, aTransform);
mCompositor->DrawDiagnostics(DIAGNOSTIC_CONTENT|DIAGNOSTIC_TILE,
graphicsRect, aClipRect, aTransform);
}
mCompositor->DrawDiagnostics(DIAGNOSTIC_CONTENT|DIAGNOSTIC_TILE,
aScreenRegion, aClipRect, aTransform);
aTile.mDeprecatedTextureHost->Unlock();
}

Просмотреть файл

@ -155,18 +155,16 @@ TextureClientD3D11::~TextureClientD3D11()
bool
TextureClientD3D11::Lock(OpenMode aMode)
{
if (!IsValid() || !IsAllocated()) {
return false;
}
MOZ_ASSERT(!mIsLocked, "The Texture is already locked!");
LockD3DTexture(mTexture.get());
mIsLocked = true;
if (mNeedsClear) {
mDrawTarget = GetAsDrawTarget();
mDrawTarget->ClearRect(Rect(0, 0, GetSize().width, GetSize().height));
mNeedsClear = false;
}
mIsLocked = true;
return true;
}
@ -175,8 +173,12 @@ TextureClientD3D11::Unlock()
{
MOZ_ASSERT(mIsLocked, "Unlocked called while the texture is not locked!");
if (mDrawTarget) {
MOZ_ASSERT(mDrawTarget->refCount() == 1);
mDrawTarget->Flush();
}
// The DrawTarget is created only once, and is only usable between calls
// to Lock and Unlock.
UnlockD3DTexture(mTexture.get());
mIsLocked = false;
}
@ -190,8 +192,6 @@ TextureClientD3D11::GetAsDrawTarget()
return mDrawTarget;
}
// The DrawTarget is created only once, and is only usable between calls
// to Lock and Unlock.
mDrawTarget = Factory::CreateDrawTargetForD3D10Texture(mTexture, mFormat);
return mDrawTarget;
}

Просмотреть файл

@ -72,9 +72,11 @@ CreateTextureHostD3D9(const SurfaceDescriptor& aDesc,
}
case SurfaceDescriptor::TSurfaceDescriptorD3D9: {
result = new TextureHostD3D9(aFlags, aDesc);
break;
}
case SurfaceDescriptor::TSurfaceDescriptorDIB: {
result = new DIBTextureHostD3D9(aFlags, aDesc);
break;
}
default: {
NS_WARNING("Unsupported SurfaceDescriptor type");
@ -1255,6 +1257,7 @@ CairoTextureClientD3D9::CairoTextureClientD3D9(gfx::SurfaceFormat aFormat, Textu
: TextureClient(aFlags)
, mFormat(aFormat)
, mIsLocked(false)
, mNeedsClear(false)
{
MOZ_COUNT_CTOR(CairoTextureClientD3D9);
}
@ -1271,6 +1274,10 @@ CairoTextureClientD3D9::Lock(OpenMode)
if (!IsValid() || !IsAllocated()) {
return false;
}
if (mNeedsClear) {
mDrawTarget = GetAsDrawTarget();
mDrawTarget->ClearRect(Rect(0, 0, GetSize().width, GetSize().height));
}
mIsLocked = true;
return true;
}
@ -1353,13 +1360,7 @@ CairoTextureClientD3D9::AllocateForSurface(gfx::IntSize aSize, TextureAllocation
return false;
}
if (aFlags & ALLOC_CLEAR_BUFFER) {
DebugOnly<bool> locked = Lock(OPEN_WRITE_ONLY);
MOZ_ASSERT(locked);
RefPtr<DrawTarget> dt = GetAsDrawTarget();
dt->ClearRect(Rect(0, 0, GetSize().width, GetSize().height));
Unlock();
}
mNeedsClear = aFlags & ALLOC_CLEAR_BUFFER;
MOZ_ASSERT(mTexture);
return true;

Просмотреть файл

@ -227,6 +227,7 @@ private:
gfx::IntSize mSize;
gfx::SurfaceFormat mFormat;
bool mIsLocked;
bool mNeedsClear;
};
/**

Просмотреть файл

@ -139,33 +139,6 @@ StreamTextureClientOGL::IsAllocated() const
return mStream != 0;
}
DeprecatedTextureClientSharedOGL::DeprecatedTextureClientSharedOGL(CompositableForwarder* aForwarder,
const TextureInfo& aTextureInfo)
: DeprecatedTextureClient(aForwarder, aTextureInfo)
, mGL(nullptr)
{
}
void
DeprecatedTextureClientSharedOGL::ReleaseResources()
{
if (!IsSurfaceDescriptorValid(mDescriptor)) {
return;
}
MOZ_ASSERT(mDescriptor.type() == SurfaceDescriptor::TSharedTextureDescriptor);
mDescriptor = SurfaceDescriptor();
// It's important our handle gets released! SharedDeprecatedTextureHostOGL will take
// care of this for us though.
}
bool
DeprecatedTextureClientSharedOGL::EnsureAllocated(gfx::IntSize aSize,
gfxContentType aContentType)
{
mSize = aSize;
return true;
}
} // namespace
} // namespace

Просмотреть файл

@ -101,50 +101,6 @@ protected:
bool mIsLocked;
};
class DeprecatedTextureClientSharedOGL : public DeprecatedTextureClient
{
public:
DeprecatedTextureClientSharedOGL(CompositableForwarder* aForwarder, const TextureInfo& aTextureInfo);
~DeprecatedTextureClientSharedOGL() { ReleaseResources(); }
virtual bool SupportsType(DeprecatedTextureClientType aType) MOZ_OVERRIDE { return aType == TEXTURE_SHARED_GL; }
virtual bool EnsureAllocated(gfx::IntSize aSize, gfxContentType aType);
virtual void ReleaseResources();
virtual gfxContentType GetContentType() MOZ_OVERRIDE { return gfxContentType::COLOR_ALPHA; }
protected:
gl::GLContext* mGL;
gfx::IntSize mSize;
friend class CompositingFactory;
};
// Doesn't own the surface descriptor, so we shouldn't delete it
class DeprecatedTextureClientSharedOGLExternal : public DeprecatedTextureClientSharedOGL
{
public:
DeprecatedTextureClientSharedOGLExternal(CompositableForwarder* aForwarder, const TextureInfo& aTextureInfo)
: DeprecatedTextureClientSharedOGL(aForwarder, aTextureInfo)
{}
virtual bool SupportsType(DeprecatedTextureClientType aType) MOZ_OVERRIDE { return aType == TEXTURE_SHARED_GL_EXTERNAL; }
virtual void ReleaseResources() {}
};
class DeprecatedTextureClientStreamOGL : public DeprecatedTextureClient
{
public:
DeprecatedTextureClientStreamOGL(CompositableForwarder* aForwarder, const TextureInfo& aTextureInfo)
: DeprecatedTextureClient(aForwarder, aTextureInfo)
{}
~DeprecatedTextureClientStreamOGL() { ReleaseResources(); }
virtual bool SupportsType(DeprecatedTextureClientType aType) MOZ_OVERRIDE { return aType == TEXTURE_STREAM_GL; }
virtual bool EnsureAllocated(gfx::IntSize aSize, gfxContentType aType) { return true; }
virtual void ReleaseResources() { mDescriptor = SurfaceDescriptor(); }
virtual gfxContentType GetContentType() MOZ_OVERRIDE { return gfxContentType::COLOR_ALPHA; }
};
} // namespace
} // namespace

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше