Wes Kocher 2014-04-02 19:56:43 -07:00
Parent 11c55d73d1 b42cc30cf7
Commit 5b5f39eb94
206 changed files: 4367 additions and 1068 deletions

View file

@ -19,7 +19,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="04d96dd43e2c5c673a0c73b5a65faeb115c2065f"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="f64a5aec7d8138d85de6862b8c6b999abc55acca"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="platform_hardware_ril" path="hardware/ril" remote="b2g" revision="d11f524d00cacf5ba0dfbf25e4aa2158b1c3a036"/>

View file

@ -12,12 +12,12 @@
<!--original fetch url was https://git.mozilla.org/releases-->
<remote fetch="https://git.mozilla.org/releases" name="mozillaorg"/>
<!-- B2G specific things. -->
<project name="platform_build" path="build" remote="b2g" revision="15d69a6789c638709911507f74d25c0425963636">
<project name="platform_build" path="build" remote="b2g" revision="f6a198295f65ac38f8511803654a3583a1c666af">
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="04d96dd43e2c5c673a0c73b5a65faeb115c2065f"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="f64a5aec7d8138d85de6862b8c6b999abc55acca"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="70b698c2e8d1764a1e27527a102df6452e405b9a"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="89c5816399e71bda92a8959b5b771c04d6672ea3"/>

View file

@ -15,7 +15,7 @@
<project name="platform_build" path="build" remote="b2g" revision="a9e08b91e9cd1f0930f16cfc49ec72f63575d5fe">
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="gaia" path="gaia" remote="mozillaorg" revision="04d96dd43e2c5c673a0c73b5a65faeb115c2065f"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="f64a5aec7d8138d85de6862b8c6b999abc55acca"/>
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="70b698c2e8d1764a1e27527a102df6452e405b9a"/>

View file

@ -19,7 +19,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="04d96dd43e2c5c673a0c73b5a65faeb115c2065f"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="f64a5aec7d8138d85de6862b8c6b999abc55acca"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="platform_hardware_ril" path="hardware/ril" remote="b2g" revision="d11f524d00cacf5ba0dfbf25e4aa2158b1c3a036"/>

View file

@ -4,6 +4,6 @@
"remote": "",
"branch": ""
},
"revision": "c23b496987e1e356c1cf7cc848c69511f6db041a",
"revision": "f2784b078a4b58c1f0af8238d5325d0f05dc9fc0",
"repo_path": "/integration/gaia-central"
}

View file

@ -17,7 +17,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="04d96dd43e2c5c673a0c73b5a65faeb115c2065f"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="f64a5aec7d8138d85de6862b8c6b999abc55acca"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="1f6a1fe07f81c5bc5e1d079c9b60f7f78ca2bf4f"/>

View file

@ -15,7 +15,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="04d96dd43e2c5c673a0c73b5a65faeb115c2065f"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="f64a5aec7d8138d85de6862b8c6b999abc55acca"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="1f6a1fe07f81c5bc5e1d079c9b60f7f78ca2bf4f"/>

View file

@ -19,7 +19,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="04d96dd43e2c5c673a0c73b5a65faeb115c2065f"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="f64a5aec7d8138d85de6862b8c6b999abc55acca"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="1f6a1fe07f81c5bc5e1d079c9b60f7f78ca2bf4f"/>

View file

@ -17,7 +17,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="04d96dd43e2c5c673a0c73b5a65faeb115c2065f"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="f64a5aec7d8138d85de6862b8c6b999abc55acca"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="1f6a1fe07f81c5bc5e1d079c9b60f7f78ca2bf4f"/>

View file

@ -12,12 +12,12 @@
<!--original fetch url was https://git.mozilla.org/releases-->
<remote fetch="https://git.mozilla.org/releases" name="mozillaorg"/>
<!-- B2G specific things. -->
<project name="platform_build" path="build" remote="b2g" revision="15d69a6789c638709911507f74d25c0425963636">
<project name="platform_build" path="build" remote="b2g" revision="f6a198295f65ac38f8511803654a3583a1c666af">
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="04d96dd43e2c5c673a0c73b5a65faeb115c2065f"/>
<project name="gaia" path="gaia" remote="mozillaorg" revision="f64a5aec7d8138d85de6862b8c6b999abc55acca"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="moztt" path="external/moztt" remote="b2g" revision="70b698c2e8d1764a1e27527a102df6452e405b9a"/>
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="89c5816399e71bda92a8959b5b771c04d6672ea3"/>

View file

@ -0,0 +1,8 @@
[
{
"size": 266240,
"digest": "bb345b0e700ffab4d09436981f14b5de84da55a3f18a7f09ebc4364a4488acdeab8d46f447b12ac70f2da1444a68b8ce8b8675f0dae2ccf845e966d1df0f0869",
"algorithm": "sha512",
"filename": "mozmake.exe"
}
]

View file

@ -17,7 +17,7 @@
<copyfile dest="Makefile" src="core/root.mk"/>
</project>
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="04d96dd43e2c5c673a0c73b5a65faeb115c2065f"/>
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="f64a5aec7d8138d85de6862b8c6b999abc55acca"/>
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="266bca6e60dad43e395f38b66edabe8bdc882334"/>
<project name="rilproxy" path="rilproxy" remote="b2g" revision="827214fcf38d6569aeb5c6d6f31cb296d1f09272"/>
<project name="librecovery" path="librecovery" remote="b2g" revision="1f6a1fe07f81c5bc5e1d079c9b60f7f78ca2bf4f"/>

View file

@ -8281,7 +8281,7 @@ case "$OS_TARGET" in
NECKO_WIFI=1
fi
;;
Darwin|SunOS|WINNT)
Darwin|FreeBSD|SunOS|WINNT)
NECKO_WIFI=1
;;
Linux)

View file

@ -63,22 +63,21 @@ const R_HOST = new RegExp ("\\*|(((\\*\\.)?" + R_HOSTCHAR.source +
// port = ":" ( 1*DIGIT / "*" )
const R_PORT = new RegExp ("(\\:([0-9]+|\\*))", 'i');
// path
const R_PATH = new RegExp("(\\/(([a-zA-Z0-9\\-\\_]+)\\/?)*)", 'i');
// file
const R_FILE = new RegExp("(\\/([a-zA-Z0-9\\-\\_]+)\\.([a-zA-Z]+))", 'i');
// host-source = [ scheme "://" ] host [ port path file ]
const R_HOSTSRC = new RegExp ("^((((" + R_SCHEME.source + "\\:\\/\\/)?("
const R_HOSTSRC = new RegExp ("^((" + R_SCHEME.source + "\\:\\/\\/)?("
+ R_HOST.source + ")"
+ R_PORT.source + "?)"
+ R_PATH.source + "?)"
+ R_FILE.source + "?)$", 'i');
+ R_PORT.source + "?)$", 'i');
function STRIP_INPUTDELIM(re) {
return re.replace(/(^\^)|(\$$)/g, "");
}
// ext-host-source = host-source "/" *( <VCHAR except ";" and ","> )
// ; ext-host-source is reserved for future use.
const R_EXTHOSTSRC = new RegExp ("^" + R_HOSTSRC.source + "\\/[:print:]+$", 'i');
const R_VCHAR_EXCEPT = new RegExp("[!-+--:<-~]"); // ranges exclude , and ;
const R_EXTHOSTSRC = new RegExp ("^" + STRIP_INPUTDELIM(R_HOSTSRC.source)
+ "\\/"
+ R_VCHAR_EXCEPT.source + "*$", 'i');
// keyword-source = "'self'" / "'unsafe-inline'" / "'unsafe-eval'"
const R_KEYWORDSRC = new RegExp ("^('self'|'unsafe-inline'|'unsafe-eval')$", 'i');
@ -99,6 +98,7 @@ const R_HASHSRC = new RegExp ("^'" + R_HASH_ALGOS.source + "-" + R_BASE64.sou
// source-exp = scheme-source / host-source / keyword-source
const R_SOURCEEXP = new RegExp (R_SCHEMESRC.source + "|" +
R_HOSTSRC.source + "|" +
R_EXTHOSTSRC.source + "|" +
R_KEYWORDSRC.source + "|" +
R_NONCESRC.source + "|" +
R_HASHSRC.source, 'i');
@ -1392,7 +1392,13 @@ CSPSource.fromString = function(aStr, aCSPRep, self, enforceSelfChecks) {
sObj._scheme = schemeMatch[0];
}
// get array of matches to the R_HOST regular expression
// Bug 916054: in CSP 1.0, source-expressions that are paths should have
// the path after the origin ignored and only the origin enforced.
if (R_EXTHOSTSRC.test(aStr)) {
var extHostMatch = R_EXTHOSTSRC.exec(aStr);
aStr = extHostMatch[1];
}
var hostMatch = R_HOSTSRC.exec(aStr);
if (!hostMatch) {
cspError(aCSPRep, CSPLocalizer.getFormatStr("couldntParseInvalidSource",
@ -1400,24 +1406,20 @@ CSPSource.fromString = function(aStr, aCSPRep, self, enforceSelfChecks) {
return null;
}
// Host regex gets scheme, so remove scheme from aStr. Add 3 for '://'
if (schemeMatch)
if (schemeMatch) {
hostMatch = R_HOSTSRC.exec(aStr.substring(schemeMatch[0].length + 3));
// Bug 916054: in CSP 1.0, source-expressions that are paths should have
// the path after the origin ignored and only the origin enforced.
hostMatch[0] = hostMatch[0].replace(R_FILE, "");
hostMatch[0] = hostMatch[0].replace(R_PATH, "");
}
var portMatch = R_PORT.exec(hostMatch);
// Host regex also gets port, so remove the port here.
if (portMatch)
if (portMatch) {
hostMatch = R_HOSTSRC.exec(hostMatch[0].substring(0, hostMatch[0].length - portMatch[0].length));
}
sObj._host = CSPHost.fromString(hostMatch[0]);
if (!portMatch) {
// gets the default port for the given scheme
defPort = Services.io.getProtocolHandler(sObj._scheme).defaultPort;
var defPort = Services.io.getProtocolHandler(sObj._scheme).defaultPort;
if (!defPort) {
cspError(aCSPRep,
CSPLocalizer.getFormatStr("couldntParseInvalidSource",
@ -1440,12 +1442,14 @@ CSPSource.fromString = function(aStr, aCSPRep, self, enforceSelfChecks) {
}
// check for a nonce-source match
if (R_NONCESRC.test(aStr))
if (R_NONCESRC.test(aStr)) {
return CSPNonceSource.fromString(aStr, aCSPRep);
}
// check for a hash-source match
if (R_HASHSRC.test(aStr))
if (R_HASHSRC.test(aStr)) {
return CSPHashSource.fromString(aStr, aCSPRep);
}
// check for 'self' (case insensitive)
if (aStr.toLowerCase() === "'self'") {

View file

@ -27,6 +27,7 @@ var policies = [
["allowed", "test1.example.com/path-1/path_2/file_1.js"],
["allowed", "test1.example.com/path-1/path_2/file-2.js"],
["allowed", "test1.example.com/path-1/path_2/f.js"],
["allowed", "test1.example.com/path-1/path_2/f.oo.js"],
["allowed", "*.example.com"],
["allowed", "*.example.com/"],
["allowed", "*.example.com/path-1"],
@ -36,6 +37,7 @@ var policies = [
["allowed", "*.example.com/path-1/path_2/file_1.js"],
["allowed", "*.example.com/path-1/path_2/file-2.js"],
["allowed", "*.example.com/path-1/path_2/f.js"],
["allowed", "*.example.com/path-1/path_2/f.oo.js"],
["allowed", "test1.example.com:80"],
["allowed", "test1.example.com:80/"],
["allowed", "test1.example.com:80/path-1"],
@ -43,6 +45,7 @@ var policies = [
["allowed", "test1.example.com:80/path-1/path_2"],
["allowed", "test1.example.com:80/path-1/path_2/"],
["allowed", "test1.example.com:80/path-1/path_2/file.js"],
["allowed", "test1.example.com:80/path-1/path_2/f.ile.js"],
["allowed", "test1.example.com:*"],
["allowed", "test1.example.com:*/"],
["allowed", "test1.example.com:*/path-1"],
@ -50,13 +53,9 @@ var policies = [
["allowed", "test1.example.com:*/path-1/path_2"],
["allowed", "test1.example.com:*/path-1/path_2/"],
["allowed", "test1.example.com:*/path-1/path_2/file.js"],
["allowed", "test1.example.com:*/path-1/path_2/f.ile.js"],
// the following tests should fail
["blocked", "test1.example.com/path-1//path_2"],
["blocked", "test1.example.com/path-1/file.js.cpp"],
["blocked", "test1.example.com:88path-1/"],
["blocked", "test1.example.com:80//"],
["blocked", "test1.example.com:80//path-1"],
["blocked", "test1.example.com:80/.js"],
["blocked", "test1.example.com:80.js"],
["blocked", "test1.example.com:*.js"],
["blocked", "test1.example.com:*."]

View file

@ -12,6 +12,32 @@ var ioService = Cc["@mozilla.org/network/io-service;1"]
.getService(Ci.nsIIOService);
var self = ioService.newURI("http://test1.example.com:80", null, null);
function testValidSRCsHostSourceWithSchemeAndPath() {
var csps = [
"http://test1.example.com",
"http://test1.example.com/",
"http://test1.example.com/path-1",
"http://test1.example.com/path-1/",
"http://test1.example.com/path-1/path_2/",
"http://test1.example.com/path-1/path_2/file.js",
"http://test1.example.com/path-1/path_2/file_1.js",
"http://test1.example.com/path-1/path_2/file-2.js",
"http://test1.example.com/path-1/path_2/f.js",
"http://test1.example.com/path-1/path_2/f.oo.js"
]
var obj;
var expected = "http://test1.example.com:80";
for (let i in csps) {
var src = csps[i];
obj = CSPSourceList.fromString(src, undefined, self);
dump("expected: " + expected + "\n");
dump("got: " + obj._sources[0] + "\n");
do_check_eq(1, obj._sources.length);
do_check_eq(obj._sources[0], expected);
}
}
function testValidSRCsRegularHost() {
var csps = [
"test1.example.com",
@ -22,7 +48,8 @@ function testValidSRCsRegularHost() {
"test1.example.com/path-1/path_2/file.js",
"test1.example.com/path-1/path_2/file_1.js",
"test1.example.com/path-1/path_2/file-2.js",
"test1.example.com/path-1/path_2/f.js"
"test1.example.com/path-1/path_2/f.js",
"test1.example.com/path-1/path_2/f.oo.js"
]
var obj;
@ -46,6 +73,7 @@ function testValidSRCsWildCardHost() {
"*.example.com/path-1/path_2/file_1.js",
"*.example.com/path-1/path_2/file-2.js",
"*.example.com/path-1/path_2/f.js",
"*.example.com/path-1/path_2/f.oo.js"
]
var obj;
@ -66,7 +94,8 @@ function testValidSRCsRegularPort() {
"test1.example.com:80/path-1/",
"test1.example.com:80/path-1/path_2",
"test1.example.com:80/path-1/path_2/",
"test1.example.com:80/path-1/path_2/file.js"
"test1.example.com:80/path-1/path_2/file.js",
"test1.example.com:80/path-1/path_2/f.ile.js"
]
var obj;
@ -87,7 +116,8 @@ function testValidSRCsWildCardPort() {
"test1.example.com:*/path-1/",
"test1.example.com:*/path-1/path_2",
"test1.example.com:*/path-1/path_2/",
"test1.example.com:*/path-1/path_2/file.js"
"test1.example.com:*/path-1/path_2/file.js",
"test1.example.com:*/path-1/path_2/f.ile.js"
]
var obj;
@ -103,12 +133,7 @@ function testValidSRCsWildCardPort() {
function testInvalidSRCs() {
var csps = [
"test1.example.com/path-1//path_2",
"test1.example.com/path-1/file.js.cpp",
"test1.example.com:88path-1/",
"test1.example.com:80//",
"test1.example.com:80//path-1",
"test1.example.com:80/.js",
"test1.example.com:80.js",
"test1.example.com:*.js",
"test1.example.com:*."
@ -124,6 +149,7 @@ function testInvalidSRCs() {
}
function run_test() {
testValidSRCsHostSourceWithSchemeAndPath();
testValidSRCsRegularHost();
testValidSRCsWildCardHost();
testValidSRCsRegularPort();

View file

@ -325,5 +325,43 @@ HTMLTrackElement::ReadyState() const
return mTrack->ReadyState();
}
void
HTMLTrackElement::SetReadyState(uint16_t aReadyState)
{
if (mTrack) {
switch (aReadyState) {
case TextTrackReadyState::Loaded:
DispatchTrackRunnable(NS_LITERAL_STRING("loaded"));
break;
case TextTrackReadyState::FailedToLoad:
DispatchTrackRunnable(NS_LITERAL_STRING("error"));
break;
}
mTrack->SetReadyState(aReadyState);
}
}
void
HTMLTrackElement::DispatchTrackRunnable(const nsString& aEventName)
{
nsCOMPtr<nsIRunnable> runnable =
NS_NewRunnableMethodWithArg
<const nsString>(this,
&HTMLTrackElement::DispatchTrustedEvent,
aEventName);
NS_DispatchToMainThread(runnable);
}
void
HTMLTrackElement::DispatchTrustedEvent(const nsAString& aName)
{
nsIDocument* doc = OwnerDoc();
if (!doc) {
return;
}
nsContentUtils::DispatchTrustedEvent(doc, static_cast<nsIContent*>(this),
aName, false, false);
}
} // namespace dom
} // namespace mozilla
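
An illustrative caller for the SetReadyState path added above (not part of the patch): for example, a WebVTT loader reporting its outcome. The helper name and call site are hypothetical.

// Notify the <track> element of the load outcome. SetReadyState fires a
// trusted "loaded" or "error" event via DispatchTrackRunnable, then updates
// the underlying TextTrack.
static void NotifyTrackLoadResult(mozilla::dom::HTMLTrackElement* aElement,
                                  bool aSucceeded)
{
  aElement->SetReadyState(aSucceeded
                            ? mozilla::dom::TextTrackReadyState::Loaded
                            : mozilla::dom::TextTrackReadyState::FailedToLoad);
}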

View file

@ -86,6 +86,7 @@ public:
}
uint16_t ReadyState() const;
void SetReadyState(uint16_t aReadyState);
TextTrack* Track();
@ -121,6 +122,8 @@ public:
// Check enabling preference.
static bool IsWebVTTEnabled();
void DispatchTrackRunnable(const nsString& aEventName);
void DispatchTrustedEvent(const nsAString& aName);
protected:
virtual JSObject* WrapNode(JSContext* aCx,
JS::Handle<JSObject*> aScope) MOZ_OVERRIDE;

View file

@ -0,0 +1,85 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MOZILLA_AUDIOMIXER_H_
#define MOZILLA_AUDIOMIXER_H_
#include "AudioSampleFormat.h"
#include "nsTArray.h"
#include "mozilla/PodOperations.h"
namespace mozilla {
typedef void(*MixerFunc)(AudioDataValue* aMixedBuffer,
AudioSampleFormat aFormat,
uint32_t aChannels,
uint32_t aFrames);
/**
* This class mixes multiple streams of audio together to output a single audio
* stream.
*
* AudioMixer::Mix is to be called repeatedly with buffers that have the same
* length, sample rate, sample format and channel count.
*
* When all the tracks have been mixed, calling FinishMixing will call back with
* a buffer containing the mixed audio data.
*
* This class is not thread safe.
*/
class AudioMixer
{
public:
AudioMixer(MixerFunc aCallback)
: mCallback(aCallback),
mFrames(0),
mChannels(0)
{ }
/* Get the data from the mixer. This is supposed to be called when all the
* tracks have been mixed in. The caller should not hold onto the data. */
void FinishMixing() {
mCallback(mMixedAudio.Elements(),
AudioSampleTypeToFormat<AudioDataValue>::Format,
mChannels,
mFrames);
PodZero(mMixedAudio.Elements(), mMixedAudio.Length());
mChannels = mFrames = 0;
}
/* Add a buffer to the mix. aSamples is interleaved. */
void Mix(AudioDataValue* aSamples, uint32_t aChannels, uint32_t aFrames) {
if (!mFrames && !mChannels) {
mFrames = aFrames;
mChannels = aChannels;
EnsureCapacityAndSilence();
}
MOZ_ASSERT(aFrames == mFrames);
MOZ_ASSERT(aChannels == mChannels);
for (uint32_t i = 0; i < aFrames * aChannels; i++) {
mMixedAudio[i] += aSamples[i];
}
}
private:
void EnsureCapacityAndSilence() {
if (mFrames * mChannels > mMixedAudio.Length()) {
mMixedAudio.SetLength(mFrames* mChannels);
}
PodZero(mMixedAudio.Elements(), mMixedAudio.Length());
}
/* Function that is called when the mixing is done. */
MixerFunc mCallback;
/* Number of frames for this mixing block. */
uint32_t mFrames;
/* Number of channels for this mixing block. */
uint32_t mChannels;
/* Buffer containing the mixed audio data. */
nsTArray<AudioDataValue> mMixedAudio;
};
}
#endif // MOZILLA_AUDIOMIXER_H_
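
A minimal usage sketch of the AudioMixer API declared above, not part of this commit; the callback and helper names (PrintMixedBlock, MixTwoTracks) are hypothetical.

// Hypothetical callback matching the MixerFunc typedef above.
static void PrintMixedBlock(AudioDataValue* aMixedBuffer,
                            AudioSampleFormat aFormat,
                            uint32_t aChannels,
                            uint32_t aFrames)
{
  // A real consumer would forward the mixed block here, for example to an
  // acoustic echo canceller's far-end observer.
}

// Mix two interleaved buffers of identical length, rate and channel count.
static void MixTwoTracks(AudioDataValue* aTrackA, AudioDataValue* aTrackB,
                         uint32_t aChannels, uint32_t aFrames)
{
  mozilla::AudioMixer mixer(PrintMixedBlock);
  mixer.Mix(aTrackA, aChannels, aFrames);
  mixer.Mix(aTrackB, aChannels, aFrames);
  // Invokes PrintMixedBlock with the summed samples, then resets the mixer
  // for the next block.
  mixer.FinishMixing();
}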

View file

@ -107,15 +107,6 @@ ResampleChannelBuffer(SpeexResamplerState* aResampler, uint32_t aChannel,
}
}
class SharedChannelArrayBuffer : public ThreadSharedObject {
public:
SharedChannelArrayBuffer(nsTArray<nsTArray<float> >* aBuffers)
{
mBuffers.SwapElements(*aBuffers);
}
nsTArray<nsTArray<float> > mBuffers;
};
void
AudioNodeExternalInputStream::TrackMapEntry::ResampleChannels(const nsTArray<const void*>& aBuffers,
uint32_t aInputDuration,
@ -178,7 +169,7 @@ AudioNodeExternalInputStream::TrackMapEntry::ResampleChannels(const nsTArray<con
}
uint32_t length = resampledBuffers[0].Length();
nsRefPtr<ThreadSharedObject> buf = new SharedChannelArrayBuffer(&resampledBuffers);
nsRefPtr<ThreadSharedObject> buf = new SharedChannelArrayBuffer<float>(&resampledBuffers);
mResampledData.AppendFrames(buf.forget(), bufferPtrs, length);
}

View file

@ -49,8 +49,19 @@ public:
typedef AudioSampleTraits<AUDIO_OUTPUT_FORMAT>::Type AudioDataValue;
// Single-sample conversion
template<typename T> class AudioSampleTypeToFormat;
template <> class AudioSampleTypeToFormat<float> {
public:
static const AudioSampleFormat Format = AUDIO_FORMAT_FLOAT32;
};
template <> class AudioSampleTypeToFormat<short> {
public:
static const AudioSampleFormat Format = AUDIO_FORMAT_S16;
};
// Single-sample conversion
/*
* Use "2^N" conversion since it's simple, fast, "bit transparent", used by
* many other libraries and apparently behaves reasonably.
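
A small illustration, not from the patch, of how the AudioSampleTypeToFormat trait introduced above resolves a sample type to its format tag at compile time; FormatOfBuffer is a hypothetical helper. AudioMixer::FinishMixing uses the same trait to tag the data it hands to its callback.

// Hypothetical helper: report the sample format of a typed audio buffer.
template<typename T>
AudioSampleFormat FormatOfBuffer(const T* /* aSamples */)
{
  // Resolves at compile time: AUDIO_FORMAT_FLOAT32 for float,
  // AUDIO_FORMAT_S16 for short.
  return AudioSampleTypeToFormat<T>::Format;
}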

View file

@ -6,8 +6,10 @@
#include "AudioSegment.h"
#include "AudioStream.h"
#include "AudioMixer.h"
#include "AudioChannelFormat.h"
#include "Latency.h"
#include "speex/speex_resampler.h"
namespace mozilla {
@ -109,70 +111,98 @@ DownmixAndInterleave(const nsTArray<const void*>& aChannelData,
aDuration, aVolume, aOutputChannels, aOutput);
}
void AudioSegment::ResampleChunks(SpeexResamplerState* aResampler)
{
uint32_t inRate, outRate;
if (mChunks.IsEmpty()) {
return;
}
speex_resampler_get_rate(aResampler, &inRate, &outRate);
switch (mChunks[0].mBufferFormat) {
case AUDIO_FORMAT_FLOAT32:
Resample<float>(aResampler, inRate, outRate);
break;
case AUDIO_FORMAT_S16:
Resample<int16_t>(aResampler, inRate, outRate);
break;
default:
MOZ_ASSERT(false);
break;
}
}
void
AudioSegment::WriteTo(uint64_t aID, AudioStream* aOutput)
AudioSegment::WriteTo(uint64_t aID, AudioStream* aOutput, AudioMixer* aMixer)
{
uint32_t outputChannels = aOutput->GetChannels();
nsAutoTArray<AudioDataValue,AUDIO_PROCESSING_FRAMES*GUESS_AUDIO_CHANNELS> buf;
nsAutoTArray<const void*,GUESS_AUDIO_CHANNELS> channelData;
if (!GetDuration()) {
return;
}
uint32_t outBufferLength = GetDuration() * outputChannels;
buf.SetLength(outBufferLength);
// Offset in the buffer that will end up sent to the AudioStream.
uint32_t offset = 0;
for (ChunkIterator ci(*this); !ci.IsEnded(); ci.Next()) {
AudioChunk& c = *ci;
TrackTicks offset = 0;
while (offset < c.mDuration) {
TrackTicks durationTicks =
std::min<TrackTicks>(c.mDuration - offset, AUDIO_PROCESSING_FRAMES);
if (uint64_t(outputChannels)*durationTicks > INT32_MAX || offset > INT32_MAX) {
NS_ERROR("Buffer overflow");
return;
}
uint32_t frames = c.mDuration;
uint32_t duration = uint32_t(durationTicks);
// If we have written data in the past, or we have real (non-silent) data
// to write, we can proceed. Otherwise, it means we just started the
// AudioStream, and we don't have real data to write to it (just silence).
// To avoid overbuffering in the AudioStream, we simply drop the silence,
// here. The stream will underrun and output silence anyways.
if (c.mBuffer || aOutput->GetWritten()) {
buf.SetLength(outputChannels*duration);
if (c.mBuffer) {
channelData.SetLength(c.mChannelData.Length());
for (uint32_t i = 0; i < channelData.Length(); ++i) {
channelData[i] =
AddAudioSampleOffset(c.mChannelData[i], c.mBufferFormat, int32_t(offset));
}
if (channelData.Length() < outputChannels) {
// Up-mix. Note that this might actually make channelData have more
// than outputChannels temporarily.
AudioChannelsUpMix(&channelData, outputChannels, gZeroChannel);
}
if (channelData.Length() > outputChannels) {
// Down-mix.
DownmixAndInterleave(channelData, c.mBufferFormat, duration,
c.mVolume, outputChannels, buf.Elements());
} else {
InterleaveAndConvertBuffer(channelData.Elements(), c.mBufferFormat,
duration, c.mVolume,
outputChannels,
buf.Elements());
}
} else {
// Assumes that a bit pattern of zeroes == 0.0f
memset(buf.Elements(), 0, buf.Length()*sizeof(AudioDataValue));
// If we have written data in the past, or we have real (non-silent) data
// to write, we can proceed. Otherwise, it means we just started the
// AudioStream, and we don't have real data to write to it (just silence).
// To avoid overbuffering in the AudioStream, we simply drop the silence,
// here. The stream will underrun and output silence anyways.
if (c.mBuffer || aOutput->GetWritten()) {
if (c.mBuffer) {
channelData.SetLength(c.mChannelData.Length());
for (uint32_t i = 0; i < channelData.Length(); ++i) {
channelData[i] = c.mChannelData[i];
}
aOutput->Write(buf.Elements(), int32_t(duration), &(c.mTimeStamp));
if (channelData.Length() < outputChannels) {
// Up-mix. Note that this might actually make channelData have more
// than outputChannels temporarily.
AudioChannelsUpMix(&channelData, outputChannels, gZeroChannel);
}
if (channelData.Length() > outputChannels) {
// Down-mix.
DownmixAndInterleave(channelData, c.mBufferFormat, frames,
c.mVolume, outputChannels, buf.Elements() + offset);
} else {
InterleaveAndConvertBuffer(channelData.Elements(), c.mBufferFormat,
frames, c.mVolume,
outputChannels,
buf.Elements() + offset);
}
} else {
// Assumes that a bit pattern of zeroes == 0.0f
memset(buf.Elements() + offset, 0, outputChannels * frames * sizeof(AudioDataValue));
}
if(!c.mTimeStamp.IsNull()) {
TimeStamp now = TimeStamp::Now();
// would be more efficient to convert c.mTimeStamp to ms at creation time and pass it here
LogTime(AsyncLatencyLogger::AudioMediaStreamTrack, aID,
(now - c.mTimeStamp).ToMilliseconds(), c.mTimeStamp);
}
offset += duration;
}
offset += frames * outputChannels;
if (!c.mTimeStamp.IsNull()) {
TimeStamp now = TimeStamp::Now();
// would be more efficient to convert c.mTimeStamp to ms at creation time and pass it here
LogTime(AsyncLatencyLogger::AudioMediaStreamTrack, aID,
(now - c.mTimeStamp).ToMilliseconds(), c.mTimeStamp);
}
}
aOutput->Write(buf.Elements(), GetDuration(), &(mChunks[mChunks.Length() - 1].mTimeStamp));
if (aMixer) {
aMixer->Mix(buf.Elements(), outputChannels, GetDuration());
}
aOutput->Start();
}

View file

@ -9,13 +9,25 @@
#include "MediaSegment.h"
#include "AudioSampleFormat.h"
#include "SharedBuffer.h"
#include "WebAudioUtils.h"
#ifdef MOZILLA_INTERNAL_API
#include "mozilla/TimeStamp.h"
#endif
namespace mozilla {
template<typename T>
class SharedChannelArrayBuffer : public ThreadSharedObject {
public:
SharedChannelArrayBuffer(nsTArray<nsTArray<T>>* aBuffers)
{
mBuffers.SwapElements(*aBuffers);
}
nsTArray<nsTArray<T>> mBuffers;
};
class AudioStream;
class AudioMixer;
/**
* For auto-arrays etc, guess this as the common number of channels.
@ -111,6 +123,7 @@ struct AudioChunk {
#endif
};
/**
* A list of audio samples consisting of a sequence of slices of SharedBuffers.
* The audio rate is determined by the track, not stored in this class.
@ -121,6 +134,43 @@ public:
AudioSegment() : MediaSegmentBase<AudioSegment, AudioChunk>(AUDIO) {}
// Resample the whole segment in place.
template<typename T>
void Resample(SpeexResamplerState* aResampler, uint32_t aInRate, uint32_t aOutRate)
{
mDuration = 0;
for (ChunkIterator ci(*this); !ci.IsEnded(); ci.Next()) {
nsAutoTArray<nsTArray<T>, GUESS_AUDIO_CHANNELS> output;
nsAutoTArray<const T*, GUESS_AUDIO_CHANNELS> bufferPtrs;
AudioChunk& c = *ci;
uint32_t channels = c.mChannelData.Length();
output.SetLength(channels);
bufferPtrs.SetLength(channels);
uint32_t inFrames = c.mDuration,
outFrames = c.mDuration * aOutRate / aInRate;
for (uint32_t i = 0; i < channels; i++) {
const T* in = static_cast<const T*>(c.mChannelData[i]);
T* out = output[i].AppendElements(outFrames);
dom::WebAudioUtils::SpeexResamplerProcess(aResampler, i,
in, &inFrames,
out, &outFrames);
bufferPtrs[i] = out;
output[i].SetLength(outFrames);
}
c.mBuffer = new mozilla::SharedChannelArrayBuffer<T>(&output);
for (uint32_t i = 0; i < channels; i++) {
c.mChannelData[i] = bufferPtrs[i];
}
c.mDuration = outFrames;
mDuration += c.mDuration;
}
}
void ResampleChunks(SpeexResamplerState* aResampler);
void AppendFrames(already_AddRefed<ThreadSharedObject> aBuffer,
const nsTArray<const float*>& aChannelData,
int32_t aDuration)
@ -166,7 +216,13 @@ public:
return chunk;
}
void ApplyVolume(float aVolume);
void WriteTo(uint64_t aID, AudioStream* aOutput);
void WriteTo(uint64_t aID, AudioStream* aOutput, AudioMixer* aMixer = nullptr);
int ChannelCount() {
NS_WARN_IF_FALSE(!mChunks.IsEmpty(),
"Cannot query channel count on an AudioSegment with no chunks.");
return mChunks.IsEmpty() ? 0 : mChunks[0].mChannelData.Length();
}
static Type StaticType() { return AUDIO; }
};
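
A hedged sketch, not part of the patch, of how the new ChannelCount() and ResampleChunks() members could be used to bring a segment captured at another rate up to the graph rate; the helper name and the caller-supplied input rate are illustrative.

// Resample aSegment in place from aInRate to the MediaStreamGraph's ideal rate.
void ResampleToGraphRate(mozilla::AudioSegment& aSegment, uint32_t aInRate)
{
  uint32_t outRate = IdealAudioRate();
  if (aInRate == outRate || !aSegment.ChannelCount()) {
    return;
  }
  SpeexResamplerState* state =
    speex_resampler_init(aSegment.ChannelCount(), aInRate, outRate,
                         SPEEX_RESAMPLER_QUALITY_DEFAULT, nullptr);
  if (state) {
    // Dispatches to Resample<float> or Resample<int16_t> depending on the
    // buffer format of the segment's chunks.
    aSegment.ResampleChunks(state);
    speex_resampler_destroy(state);
  }
}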

View file

@ -85,7 +85,7 @@ public:
// Represents a change yet to be made to a block in the file. The change
// is either a write (and the data to be written is stored in this struct)
// or a move (and the index of the source block is stored instead).
struct BlockChange {
struct BlockChange MOZ_FINAL {
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(BlockChange)
@ -113,6 +113,12 @@ public:
return mSourceBlockIndex == -1 &&
mData.get() != nullptr;
}
private:
// Private destructor, to discourage deletion outside of Release():
~BlockChange()
{
}
};
class Int32Queue : private nsDeque {

View file

@ -698,7 +698,6 @@ void MediaDecoder::QueueMetadata(int64_t aPublishTime,
bool
MediaDecoder::IsDataCachedToEndOfResource()
{
NS_ASSERTION(!mShuttingDown, "Don't call during shutdown!");
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
return (mResource &&
mResource->IsDataCachedToEndOfResource(mDecoderPosition));

View file

@ -160,6 +160,7 @@ MediaDecoderStateMachine::MediaDecoderStateMachine(MediaDecoder* aDecoder,
bool aRealTime) :
mDecoder(aDecoder),
mState(DECODER_STATE_DECODING_METADATA),
mInRunningStateMachine(false),
mSyncPointInMediaStream(-1),
mSyncPointInDecodedStream(-1),
mResetPlayStartTime(false),
@ -191,10 +192,7 @@ MediaDecoderStateMachine::MediaDecoderStateMachine(MediaDecoder* aDecoder,
mDispatchedEventToDecode(false),
mStopAudioThread(true),
mQuickBuffering(false),
mIsRunning(false),
mRunAgain(false),
mMinimizePreroll(false),
mDispatchedRunEvent(false),
mDecodeThreadWaiting(false),
mRealTime(aRealTime),
mEventManager(aDecoder),
@ -1047,6 +1045,12 @@ nsresult MediaDecoderStateMachine::Init(MediaDecoderStateMachine* aCloneDonor)
mStateMachineThreadPool = stateMachinePool;
nsresult rv;
mTimer = do_CreateInstance("@mozilla.org/timer;1", &rv);
NS_ENSURE_SUCCESS(rv, rv);
rv = mTimer->SetTarget(GetStateMachineThread());
NS_ENSURE_SUCCESS(rv, rv);
return mReader->Init(cloneReader);
}
@ -2700,42 +2704,21 @@ nsresult MediaDecoderStateMachine::GetBuffered(dom::TimeRanges* aBuffered) {
return res;
}
nsresult MediaDecoderStateMachine::Run()
{
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
return CallRunStateMachine();
}
nsresult MediaDecoderStateMachine::CallRunStateMachine()
{
AssertCurrentThreadInMonitor();
NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
// This will be set to true by ScheduleStateMachine() if it's called
// while we're in RunStateMachine().
mRunAgain = false;
// Set to true whenever we dispatch an event to run this state machine.
// This flag prevents us from dispatching
mDispatchedRunEvent = false;
// If audio is being captured, stop the audio thread if it's running
if (mAudioCaptured) {
StopAudioThread();
}
MOZ_ASSERT(!mInRunningStateMachine, "State machine cycles must run in sequence!");
mTimeout = TimeStamp();
mIsRunning = true;
mInRunningStateMachine = true;
nsresult res = RunStateMachine();
mIsRunning = false;
if (mRunAgain && !mDispatchedRunEvent) {
mDispatchedRunEvent = true;
return GetStateMachineThread()->Dispatch(this, NS_DISPATCH_NORMAL);
}
mInRunningStateMachine = false;
return res;
}
@ -2750,16 +2733,7 @@ void MediaDecoderStateMachine::TimeoutExpired()
{
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
NS_ASSERTION(OnStateMachineThread(), "Must be on state machine thread");
if (mIsRunning) {
mRunAgain = true;
} else if (!mDispatchedRunEvent) {
// We don't have an event dispatched to run the state machine, so we
// can just run it from here.
CallRunStateMachine();
}
// Otherwise, an event has already been dispatched to run the state machine
// as soon as possible. Nothing else needed to do, the state machine is
// going to run anyway.
CallRunStateMachine();
}
void MediaDecoderStateMachine::ScheduleStateMachineWithLockAndWakeDecoder() {
@ -2779,60 +2753,25 @@ nsresult MediaDecoderStateMachine::ScheduleStateMachine(int64_t aUsecs) {
aUsecs = std::max<int64_t>(aUsecs, 0);
TimeStamp timeout = TimeStamp::Now() + UsecsToDuration(aUsecs);
if (!mTimeout.IsNull()) {
if (timeout >= mTimeout) {
// We've already scheduled a timer set to expire at or before this time,
// or have an event dispatched to run the state machine.
return NS_OK;
}
if (mTimer) {
// We've been asked to schedule a timer to run before an existing timer.
// Cancel the existing timer.
mTimer->Cancel();
}
if (!mTimeout.IsNull() && timeout >= mTimeout) {
// We've already scheduled a timer set to expire at or before this time,
// or have an event dispatched to run the state machine.
return NS_OK;
}
uint32_t ms = static_cast<uint32_t>((aUsecs / USECS_PER_MS) & 0xFFFFFFFF);
if (mRealTime && ms > 40)
if (mRealTime && ms > 40) {
ms = 40;
if (ms == 0) {
if (mIsRunning) {
// We're currently running this state machine on the state machine
// thread. Signal it to run again once it finishes its current cycle.
mRunAgain = true;
return NS_OK;
} else if (!mDispatchedRunEvent) {
// We're not currently running this state machine on the state machine
// thread. Dispatch an event to run one cycle of the state machine.
mDispatchedRunEvent = true;
return GetStateMachineThread()->Dispatch(this, NS_DISPATCH_NORMAL);
}
// We're not currently running this state machine on the state machine
// thread, but something has already dispatched an event to run it again,
// so just exit; it's going to run real soon.
return NS_OK;
}
// Since there is already a pending task that will run immediately,
// we don't need to schedule a timer task.
if (mRunAgain) {
return NS_OK;
}
mTimeout = timeout;
nsresult res;
if (!mTimer) {
mTimer = do_CreateInstance("@mozilla.org/timer;1", &res);
if (NS_FAILED(res)) return res;
mTimer->SetTarget(GetStateMachineThread());
}
res = mTimer->InitWithFuncCallback(mozilla::TimeoutExpired,
this,
ms,
nsITimer::TYPE_ONE_SHOT);
return res;
// Cancel existing timer if any since we are going to schedule a new one.
mTimer->Cancel();
nsresult rv = mTimer->InitWithFuncCallback(mozilla::TimeoutExpired,
this,
ms,
nsITimer::TYPE_ONE_SHOT);
NS_ENSURE_SUCCESS(rv, rv);
return NS_OK;
}
bool MediaDecoderStateMachine::OnDecodeThread() const

View file

@ -113,8 +113,9 @@ class SharedThreadPool;
See MediaDecoder.h for more details.
*/
class MediaDecoderStateMachine : public nsRunnable
class MediaDecoderStateMachine
{
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaDecoderStateMachine)
public:
typedef MediaDecoder::DecodedStreamData DecodedStreamData;
MediaDecoderStateMachine(MediaDecoder* aDecoder,
@ -227,9 +228,6 @@ public:
// the main thread.
void StartBuffering();
// State machine thread run function. Defers to RunStateMachine().
NS_IMETHOD Run() MOZ_OVERRIDE;
// This is called on the state machine thread and audio thread.
// The decoder monitor must be obtained before calling this.
bool HasAudio() const {
@ -646,7 +644,7 @@ private:
bool IsStateMachineScheduled() const {
AssertCurrentThreadInMonitor();
return !mTimeout.IsNull() || mRunAgain;
return !mTimeout.IsNull();
}
// Returns true if we're not playing and the decode thread has filled its
@ -682,15 +680,17 @@ private:
RefPtr<SharedThreadPool> mStateMachineThreadPool;
// Timer to call the state machine Run() method. Used by
// Timer to run the state machine cycles. Used by
// ScheduleStateMachine(). Access protected by decoder monitor.
nsCOMPtr<nsITimer> mTimer;
// Timestamp at which the next state machine Run() method will be called.
// If this is non-null, a call to Run() is scheduled, either by a timer,
// or via an event. Access protected by decoder monitor.
// Timestamp at which the next state machine cycle will run.
// Access protected by decoder monitor.
TimeStamp mTimeout;
// Used to check that state machine cycles run in sequence.
DebugOnly<bool> mInRunningStateMachine;
// The time that playback started from the system clock. This is used for
// timing the presentation of video frames when there's no audio.
// Accessed only via the state machine thread.
@ -920,14 +920,6 @@ private:
// Synchronised via decoder monitor.
bool mQuickBuffering;
// True if the shared state machine thread is currently running this
// state machine.
bool mIsRunning;
// True if we should run the state machine again once the current
// state machine run has finished.
bool mRunAgain;
// True if we should not decode/preroll unnecessary samples, unless we're
// played. "Prerolling" in this context refers to when we decode and
// buffer decoded samples in advance of when they're needed for playback.
@ -940,13 +932,6 @@ private:
// memory and CPU overhead.
bool mMinimizePreroll;
// True if we've dispatched an event to run the state machine. It's
// imperative that we don't dispatch multiple events to run the state
// machine at the same time, as our code assume all events are synchronous.
// If we dispatch multiple events, the second event can run while the
// first is shutting down a thread, causing inconsistent state.
bool mDispatchedRunEvent;
// True if the decode thread has gone filled its buffers and is now
// waiting to be awakened before it continues decoding. Synchronized
// by the decoder monitor.

View file

@ -267,9 +267,8 @@ protected:
void AppendSliceInternal(const MediaSegmentBase<C, Chunk>& aSource,
TrackTicks aStart, TrackTicks aEnd)
{
NS_ASSERTION(aStart <= aEnd, "Endpoints inverted");
NS_WARN_IF_FALSE(aStart >= 0 && aEnd <= aSource.mDuration,
"Slice out of range");
MOZ_ASSERT(aStart <= aEnd, "Endpoints inverted");
MOZ_ASSERT(aStart >= 0 && aEnd <= aSource.mDuration, "Slice out of range");
mDuration += aEnd - aStart;
TrackTicks offset = 0;
for (uint32_t i = 0; i < aSource.mChunks.Length() && offset < aEnd; ++i) {

View file

@ -26,6 +26,8 @@
#include "DOMMediaStream.h"
#include "GeckoProfiler.h"
#include "mozilla/unused.h"
#include "speex/speex_resampler.h"
#include "AudioOutputObserver.h"
using namespace mozilla::layers;
using namespace mozilla::dom;
@ -172,15 +174,16 @@ MediaStreamGraphImpl::ExtractPendingInput(SourceMediaStream* aStream,
MediaStreamListener* l = aStream->mListeners[j];
TrackTicks offset = (data->mCommands & SourceMediaStream::TRACK_CREATE)
? data->mStart : aStream->mBuffer.FindTrack(data->mID)->GetSegment()->GetDuration();
l->NotifyQueuedTrackChanges(this, data->mID, data->mRate,
l->NotifyQueuedTrackChanges(this, data->mID, data->mOutputRate,
offset, data->mCommands, *data->mData);
}
if (data->mCommands & SourceMediaStream::TRACK_CREATE) {
MediaSegment* segment = data->mData.forget();
STREAM_LOG(PR_LOG_DEBUG, ("SourceMediaStream %p creating track %d, rate %d, start %lld, initial end %lld",
aStream, data->mID, data->mRate, int64_t(data->mStart),
aStream, data->mID, data->mOutputRate, int64_t(data->mStart),
int64_t(segment->GetDuration())));
aStream->mBuffer.AddTrack(data->mID, data->mRate, data->mStart, segment);
aStream->mBuffer.AddTrack(data->mID, data->mOutputRate, data->mStart, segment);
// The track has taken ownership of data->mData, so let's replace
// data->mData with an empty clone.
data->mData = segment->CreateEmptyClone();
@ -332,7 +335,7 @@ MediaStreamGraphImpl::GetAudioPosition(MediaStream* aStream)
return mCurrentTime;
}
return aStream->mAudioOutputStreams[0].mAudioPlaybackStartTime +
TicksToTimeRoundDown(aStream->mAudioOutputStreams[0].mStream->GetRate(),
TicksToTimeRoundDown(IdealAudioRate(),
positionInFrames);
}
@ -575,17 +578,37 @@ MediaStreamGraphImpl::UpdateStreamOrderForStream(mozilla::LinkedList<MediaStream
*mStreams.AppendElement() = stream.forget();
}
static void AudioMixerCallback(AudioDataValue* aMixedBuffer,
AudioSampleFormat aFormat,
uint32_t aChannels,
uint32_t aFrames)
{
// Need an api to register mixer callbacks, bug 989921
if (aFrames > 0 && aChannels > 0) {
// XXX need Observer base class and registration API
if (gFarendObserver) {
gFarendObserver->InsertFarEnd(aMixedBuffer, aFrames, false,
IdealAudioRate(), aChannels, aFormat);
}
}
}
void
MediaStreamGraphImpl::UpdateStreamOrder()
{
mOldStreams.SwapElements(mStreams);
mStreams.ClearAndRetainStorage();
bool shouldMix = false;
for (uint32_t i = 0; i < mOldStreams.Length(); ++i) {
MediaStream* stream = mOldStreams[i];
stream->mHasBeenOrdered = false;
stream->mIsConsumed = false;
stream->mIsOnOrderingStack = false;
stream->mInBlockingSet = false;
if (stream->AsSourceStream() &&
stream->AsSourceStream()->NeedsMixing()) {
shouldMix = true;
}
ProcessedMediaStream* ps = stream->AsProcessedStream();
if (ps) {
ps->mInCycle = false;
@ -596,6 +619,12 @@ MediaStreamGraphImpl::UpdateStreamOrder()
}
}
if (!mMixer && shouldMix) {
mMixer = new AudioMixer(AudioMixerCallback);
} else if (mMixer && !shouldMix) {
mMixer = nullptr;
}
mozilla::LinkedList<MediaStream> stack;
for (uint32_t i = 0; i < mOldStreams.Length(); ++i) {
nsRefPtr<MediaStream>& s = mOldStreams[i];
@ -808,10 +837,11 @@ MediaStreamGraphImpl::CreateOrDestroyAudioStreams(GraphTime aAudioOutputStartTim
aStream->mAudioOutputStreams.AppendElement();
audioOutputStream->mAudioPlaybackStartTime = aAudioOutputStartTime;
audioOutputStream->mBlockedAudioTime = 0;
audioOutputStream->mLastTickWritten = 0;
audioOutputStream->mStream = new AudioStream();
// XXX for now, allocate stereo output. But we need to fix this to
// match the system's ideal channel configuration.
audioOutputStream->mStream->Init(2, tracks->GetRate(), AUDIO_CHANNEL_NORMAL, AudioStream::LowLatency);
audioOutputStream->mStream->Init(2, IdealAudioRate(), AUDIO_CHANNEL_NORMAL, AudioStream::LowLatency);
audioOutputStream->mTrackID = tracks->GetID();
LogLatency(AsyncLatencyLogger::AudioStreamCreate,
@ -829,14 +859,22 @@ MediaStreamGraphImpl::CreateOrDestroyAudioStreams(GraphTime aAudioOutputStartTim
}
}
void
TrackTicks
MediaStreamGraphImpl::PlayAudio(MediaStream* aStream,
GraphTime aFrom, GraphTime aTo)
{
MOZ_ASSERT(mRealtime, "Should only attempt to play audio in realtime mode");
TrackTicks ticksWritten = 0;
// We compute the number of ticks needed by converting a difference of graph
// times rather than by subtracting two converted stream times, to ensure that
// the rounding between {Graph,Stream}Time and track ticks does not depend
// on the absolute value of the {Graph,Stream}Time, and so that the number of
// ticks to play is the same for each cycle.
TrackTicks ticksNeeded = TimeToTicksRoundDown(IdealAudioRate(), aTo) - TimeToTicksRoundDown(IdealAudioRate(), aFrom);
if (aStream->mAudioOutputStreams.IsEmpty()) {
return;
return 0;
}
// When we're playing multiple copies of this stream at the same time, they're
@ -850,6 +888,25 @@ MediaStreamGraphImpl::PlayAudio(MediaStream* aStream,
MediaStream::AudioOutputStream& audioOutput = aStream->mAudioOutputStreams[i];
StreamBuffer::Track* track = aStream->mBuffer.FindTrack(audioOutput.mTrackID);
AudioSegment* audio = track->Get<AudioSegment>();
AudioSegment output;
MOZ_ASSERT(track->GetRate() == IdealAudioRate());
// offset and audioOutput.mLastTickWritten can differ by at most one sample,
// because of the rounding issue. We track that to ensure we don't skip a
// sample, or play a sample twice.
TrackTicks offset = track->TimeToTicksRoundDown(GraphTimeToStreamTime(aStream, aFrom));
if (!audioOutput.mLastTickWritten) {
audioOutput.mLastTickWritten = offset;
}
if (audioOutput.mLastTickWritten != offset) {
// If there is a global underrun of the MSG, this property won't hold, and
// we reset the sample count tracking.
if (std::abs(audioOutput.mLastTickWritten - offset) != 1) {
audioOutput.mLastTickWritten = offset;
} else {
offset = audioOutput.mLastTickWritten;
}
}
// We don't update aStream->mBufferStartTime here to account for
// time spent blocked. Instead, we'll update it in UpdateCurrentTime after the
@ -857,54 +914,59 @@ MediaStreamGraphImpl::PlayAudio(MediaStream* aStream,
// right offsets in the stream buffer, even if we've already written silence for
// some amount of blocked time after the current time.
GraphTime t = aFrom;
while (t < aTo) {
while (ticksNeeded) {
GraphTime end;
bool blocked = aStream->mBlocked.GetAt(t, &end);
end = std::min(end, aTo);
AudioSegment output;
if (blocked) {
// Track total blocked time in aStream->mBlockedAudioTime so that
// the amount of silent samples we've inserted for blocking never gets
// more than one sample away from the ideal amount.
TrackTicks startTicks =
TimeToTicksRoundDown(track->GetRate(), audioOutput.mBlockedAudioTime);
audioOutput.mBlockedAudioTime += end - t;
TrackTicks endTicks =
TimeToTicksRoundDown(track->GetRate(), audioOutput.mBlockedAudioTime);
output.InsertNullDataAtStart(endTicks - startTicks);
STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing blocking-silence samples for %f to %f",
aStream, MediaTimeToSeconds(t), MediaTimeToSeconds(end)));
// Check how many ticks of sound we can provide if we are blocked some
// time in the middle of this cycle.
TrackTicks toWrite = 0;
if (end >= aTo) {
toWrite = ticksNeeded;
} else {
TrackTicks startTicks =
track->TimeToTicksRoundDown(GraphTimeToStreamTime(aStream, t));
TrackTicks endTicks =
track->TimeToTicksRoundDown(GraphTimeToStreamTime(aStream, end));
// If startTicks is before the track start, then that part of 'audio'
// will just be silence, which is fine here. But if endTicks is after
// the track end, then 'audio' won't be long enough, so we'll need
// to explicitly play silence.
TrackTicks sliceEnd = std::min(endTicks, audio->GetDuration());
if (sliceEnd > startTicks) {
output.AppendSlice(*audio, startTicks, sliceEnd);
}
// Play silence where the track has ended
output.AppendNullData(endTicks - sliceEnd);
NS_ASSERTION(endTicks == sliceEnd || track->IsEnded(),
"Ran out of data but track not ended?");
output.ApplyVolume(volume);
STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing samples for %f to %f (samples %lld to %lld)",
aStream, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
startTicks, endTicks));
toWrite = TimeToTicksRoundDown(IdealAudioRate(), end - aFrom);
}
if (blocked) {
output.InsertNullDataAtStart(toWrite);
STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing %ld blocking-silence samples for %f to %f (%ld to %ld)\n",
aStream, toWrite, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
offset, offset + toWrite));
ticksNeeded -= toWrite;
} else {
TrackTicks endTicksNeeded = offset + toWrite;
TrackTicks endTicksAvailable = audio->GetDuration();
if (endTicksNeeded <= endTicksAvailable) {
output.AppendSlice(*audio, offset, endTicksNeeded);
} else {
MOZ_ASSERT(track->IsEnded(), "Not enough data, and track not ended.");
// If we are at the end of the track, maybe write the remaining
// samples, and pad the rest with silence.
if (endTicksNeeded > endTicksAvailable &&
offset < endTicksAvailable) {
output.AppendSlice(*audio, offset, endTicksAvailable);
ticksNeeded -= endTicksAvailable - offset;
toWrite -= endTicksAvailable - offset;
}
output.AppendNullData(toWrite);
}
output.ApplyVolume(volume);
STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing %ld samples for %f to %f (samples %ld to %ld)\n",
aStream, toWrite, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
offset, endTicksNeeded));
ticksNeeded -= toWrite;
}
// Need unique id for stream & track - and we want it to match the inserter
output.WriteTo(LATENCY_STREAM_ID(aStream, track->GetID()),
audioOutput.mStream);
t = end;
offset += toWrite;
audioOutput.mLastTickWritten += toWrite;
}
// Need unique id for stream & track - and we want it to match the inserter
output.WriteTo(LATENCY_STREAM_ID(aStream, track->GetID()),
audioOutput.mStream, mMixer);
}
return ticksWritten;
}
static void
@ -1239,6 +1301,9 @@ MediaStreamGraphImpl::RunThread()
bool allBlockedForever = true;
// True when we've done ProcessInput for all processed streams.
bool doneAllProducing = false;
// This is the number of frames written to the AudioStreams during this cycle.
TrackTicks ticksPlayed = 0;
// Figure out what each stream wants to do
for (uint32_t i = 0; i < mStreams.Length(); ++i) {
MediaStream* stream = mStreams[i];
@ -1275,7 +1340,13 @@ MediaStreamGraphImpl::RunThread()
if (mRealtime) {
// Only playback audio and video in real-time mode
CreateOrDestroyAudioStreams(prevComputedTime, stream);
PlayAudio(stream, prevComputedTime, mStateComputedTime);
TrackTicks ticksPlayedForThisStream = PlayAudio(stream, prevComputedTime, mStateComputedTime);
if (!ticksPlayed) {
ticksPlayed = ticksPlayedForThisStream;
} else {
MOZ_ASSERT(!ticksPlayedForThisStream || ticksPlayedForThisStream == ticksPlayed,
"Each stream should have the same number of frames.");
}
PlayVideo(stream);
}
SourceMediaStream* is = stream->AsSourceStream();
@ -1287,6 +1358,11 @@ MediaStreamGraphImpl::RunThread()
allBlockedForever = false;
}
}
if (mMixer) {
mMixer->FinishMixing();
}
if (ensureNextIteration || !allBlockedForever) {
EnsureNextIteration();
}
@ -1392,12 +1468,6 @@ MediaStreamGraphImpl::ForceShutDown()
}
}
void
MediaStreamGraphImpl::Init()
{
AudioStream::InitPreferredSampleRate();
}
namespace {
class MediaStreamGraphInitThreadRunnable : public nsRunnable {
@ -1410,7 +1480,6 @@ public:
{
char aLocal;
profiler_register_thread("MediaStreamGraph", &aLocal);
mGraph->Init();
mGraph->RunThread();
return NS_OK;
}
@ -1782,7 +1851,7 @@ MediaStream::EnsureTrack(TrackID aTrackId, TrackRate aSampleRate)
nsAutoPtr<MediaSegment> segment(new AudioSegment());
for (uint32_t j = 0; j < mListeners.Length(); ++j) {
MediaStreamListener* l = mListeners[j];
l->NotifyQueuedTrackChanges(Graph(), aTrackId, aSampleRate, 0,
l->NotifyQueuedTrackChanges(Graph(), aTrackId, IdealAudioRate(), 0,
MediaStreamListener::TRACK_EVENT_CREATED,
*segment);
}
@ -2129,7 +2198,10 @@ SourceMediaStream::AddTrack(TrackID aID, TrackRate aRate, TrackTicks aStart,
MutexAutoLock lock(mMutex);
TrackData* data = mUpdateTracks.AppendElement();
data->mID = aID;
data->mRate = aRate;
data->mInputRate = aRate;
// We resample all audio input tracks to the sample rate of the audio mixer.
data->mOutputRate = aSegment->GetType() == MediaSegment::AUDIO ?
IdealAudioRate() : aRate;
data->mStart = aStart;
data->mCommands = TRACK_CREATE;
data->mData = aSegment;
@ -2139,6 +2211,28 @@ SourceMediaStream::AddTrack(TrackID aID, TrackRate aRate, TrackTicks aStart,
}
}
void
SourceMediaStream::ResampleAudioToGraphSampleRate(TrackData* aTrackData, MediaSegment* aSegment)
{
if (aSegment->GetType() != MediaSegment::AUDIO ||
aTrackData->mInputRate == IdealAudioRate()) {
return;
}
AudioSegment* segment = static_cast<AudioSegment*>(aSegment);
if (!aTrackData->mResampler) {
int channels = segment->ChannelCount();
SpeexResamplerState* state = speex_resampler_init(channels,
aTrackData->mInputRate,
IdealAudioRate(),
SPEEX_RESAMPLER_QUALITY_DEFAULT,
nullptr);
if (state) {
aTrackData->mResampler.own(state);
}
}
segment->ResampleChunks(aTrackData->mResampler);
}
bool
SourceMediaStream::AppendToTrack(TrackID aID, MediaSegment* aSegment, MediaSegment *aRawSegment)
{
@ -2158,6 +2252,8 @@ SourceMediaStream::AppendToTrack(TrackID aID, MediaSegment* aSegment, MediaSegme
// or inserting into the graph
ApplyTrackDisabling(aID, aSegment, aRawSegment);
ResampleAudioToGraphSampleRate(track, aSegment);
// Must notify first, since AppendFrom() will empty out aSegment
NotifyDirectConsumers(track, aRawSegment ? aRawSegment : aSegment);
track->mData->AppendFrom(aSegment); // note: aSegment is now dead
@ -2182,7 +2278,7 @@ SourceMediaStream::NotifyDirectConsumers(TrackData *aTrack,
for (uint32_t j = 0; j < mDirectListeners.Length(); ++j) {
MediaStreamDirectListener* l = mDirectListeners[j];
TrackTicks offset = 0; // FIX! need a separate TrackTicks.... or the end of the internal buffer
l->NotifyRealtimeData(static_cast<MediaStreamGraph*>(GraphImpl()), aTrack->mID, aTrack->mRate,
l->NotifyRealtimeData(static_cast<MediaStreamGraph*>(GraphImpl()), aTrack->mID, aTrack->mOutputRate,
offset, aTrack->mCommands, *aSegment);
}
}
@ -2295,6 +2391,20 @@ SourceMediaStream::GetBufferedTicks(TrackID aID)
return 0;
}
void
SourceMediaStream::RegisterForAudioMixing()
{
MutexAutoLock lock(mMutex);
mNeedsMixing = true;
}
bool
SourceMediaStream::NeedsMixing()
{
MutexAutoLock lock(mMutex);
return mNeedsMixing;
}
void
MediaInputPort::Init()
{
@ -2479,6 +2589,7 @@ MediaStreamGraphImpl::MediaStreamGraphImpl(bool aRealtime)
, mNonRealtimeProcessing(false)
, mStreamOrderDirty(false)
, mLatencyLog(AsyncLatencyLogger::Get())
, mMixer(nullptr)
{
#ifdef PR_LOGGING
if (!gMediaStreamGraphLog) {
@ -2521,6 +2632,8 @@ MediaStreamGraph::GetInstance()
gGraph = new MediaStreamGraphImpl(true);
STREAM_LOG(PR_LOG_DEBUG, ("Starting up MediaStreamGraph %p", gGraph));
AudioStream::InitPreferredSampleRate();
}
return gGraph;
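
A hedged sketch, not part of the patch, of how an audio input backend might use the new RegisterForAudioMixing() and per-track resampling shown above; the track ID, the 44100 Hz input rate and the helper name are made up for illustration.

// Create an audio track on a capture stream whose device delivers 44.1 kHz.
static void SetUpMicrophoneTrack(mozilla::SourceMediaStream* aStream)
{
  // Opt the graph into mixing: UpdateStreamOrder() then allocates the
  // AudioMixer, and AudioMixerCallback receives the mixed data each iteration.
  aStream->RegisterForAudioMixing();

  // AddTrack records mInputRate = 44100 and mOutputRate = IdealAudioRate();
  // subsequent AppendToTrack() calls go through
  // ResampleAudioToGraphSampleRate().
  mozilla::AudioSegment* segment = new mozilla::AudioSegment();
  aStream->AddTrack(1 /* TrackID */, 44100 /* input rate */, 0 /* start */,
                    segment);
}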

View file

@ -16,9 +16,19 @@
#include "VideoFrameContainer.h"
#include "VideoSegment.h"
#include "MainThreadUtils.h"
#include "nsAutoRef.h"
#include "speex/speex_resampler.h"
#include "AudioMixer.h"
class nsIRunnable;
template <>
class nsAutoRefTraits<SpeexResamplerState> : public nsPointerRefTraits<SpeexResamplerState>
{
public:
static void Release(SpeexResamplerState* aState) { speex_resampler_destroy(aState); }
};
namespace mozilla {
class DOMMediaStream;
@ -89,9 +99,11 @@ class MediaStreamGraph;
* attached to a stream that has already finished, we'll call NotifyFinished.
*/
class MediaStreamListener {
public:
protected:
// Protected destructor, to discourage deletion outside of Release():
virtual ~MediaStreamListener() {}
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaStreamListener)
enum Consumption {
@ -291,6 +303,9 @@ public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaStream)
MediaStream(DOMMediaStream* aWrapper);
protected:
// Protected destructor, to discourage deletion outside of Release():
virtual ~MediaStream()
{
MOZ_COUNT_DTOR(MediaStream);
@ -299,6 +314,7 @@ public:
"All main thread listeners should have been removed");
}
public:
/**
* Returns the graph that owns this stream.
*/
@ -557,6 +573,8 @@ protected:
// Amount of time that we've wanted to play silence because of the stream
// blocking.
MediaTime mBlockedAudioTime;
// Last tick written to the audio output.
TrackTicks mLastTickWritten;
nsAutoPtr<AudioStream> mStream;
TrackID mTrackID;
};
@ -656,6 +674,9 @@ public:
*/
void AddTrack(TrackID aID, TrackRate aRate, TrackTicks aStart,
MediaSegment* aSegment);
struct TrackData;
void ResampleAudioToGraphSampleRate(TrackData* aTrackData, MediaSegment* aSegment);
/**
* Append media data to a track. Ownership of aSegment remains with the caller,
* but aSegment is emptied.
@ -746,7 +767,13 @@ public:
*/
struct TrackData {
TrackID mID;
TrackRate mRate;
// Sample rate of the input data.
TrackRate mInputRate;
// Sample rate of the output data, always equal to IdealAudioRate()
TrackRate mOutputRate;
// Resampler if the rate of the input track does not match the
// MediaStreamGraph's.
nsAutoRef<SpeexResamplerState> mResampler;
TrackTicks mStart;
// Each time the track updates are flushed to the media graph thread,
// this is cleared.
@ -758,6 +785,9 @@ public:
bool mHaveEnough;
};
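// Marks this stream as one whose audio should be mixed down by the graph
// (see MediaStreamGraphImpl::mMixer); NeedsMixing() reports whether this
// has been requested.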
void RegisterForAudioMixing();
bool NeedsMixing();
protected:
TrackData* FindDataForTrack(TrackID aID)
{
@ -791,6 +821,7 @@ protected:
bool mPullEnabled;
bool mUpdateFinished;
bool mDestroyed;
bool mNeedsMixing;
};
/**
@ -810,7 +841,8 @@ protected:
* the Destroy message is processed on the graph manager thread we disconnect
* the port and drop the graph's reference, destroying the object.
*/
class MediaInputPort {
class MediaInputPort MOZ_FINAL {
private:
// Do not call this constructor directly. Instead call aDest->AllocateInputPort.
MediaInputPort(MediaStream* aSource, ProcessedMediaStream* aDest,
uint32_t aFlags, uint16_t aInputNumber,
@ -825,6 +857,12 @@ class MediaInputPort {
MOZ_COUNT_CTOR(MediaInputPort);
}
// Private destructor, to discourage deletion outside of Release():
~MediaInputPort()
{
MOZ_COUNT_DTOR(MediaInputPort);
}
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaInputPort)
@ -841,10 +879,6 @@ public:
// stream.
FLAG_BLOCK_OUTPUT = 0x02
};
~MediaInputPort()
{
MOZ_COUNT_DTOR(MediaInputPort);
}
// Called on graph manager thread
// Do not call these from outside MediaStreamGraph.cpp!
@ -886,7 +920,7 @@ public:
*/
void SetGraphImpl(MediaStreamGraphImpl* aGraph);
protected:
private:
friend class MediaStreamGraphImpl;
friend class MediaStream;
friend class ProcessedMediaStream;
@ -994,7 +1028,7 @@ protected:
bool mInCycle;
};
// Returns ideal audio rate for processing
// Returns ideal audio rate for processing.
inline TrackRate IdealAudioRate() { return AudioStream::PreferredSampleRate(); }
/**

Просмотреть файл

@ -13,12 +13,15 @@
#include "nsIThread.h"
#include "nsIRunnable.h"
#include "Latency.h"
#include "mozilla/WeakPtr.h"
namespace mozilla {
template <typename T>
class LinkedList;
class AudioMixer;
/**
* Assume we can run an iteration of the MediaStreamGraph loop in this much time
* or less.
@ -323,9 +326,9 @@ public:
MediaStream* aStream);
/**
* Queue audio (mix of stream audio and silence for blocked intervals)
* to the audio output stream.
* to the audio output stream. Returns the number of frames played.
*/
void PlayAudio(MediaStream* aStream, GraphTime aFrom, GraphTime aTo);
TrackTicks PlayAudio(MediaStream* aStream, GraphTime aFrom, GraphTime aTo);
/**
* Set the correct current video frame for stream aStream.
*/
@ -571,6 +574,10 @@ public:
* Hold a ref to the Latency logger
*/
nsRefPtr<AsyncLatencyLogger> mLatencyLog;
/**
* If this is not null, all the audio output for the MSG will be mixed down.
*/
nsAutoPtr<AudioMixer> mMixer;
};
}

Просмотреть файл

@ -19,6 +19,9 @@ namespace mozilla {
class ThreadSharedObject {
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(ThreadSharedObject)
protected:
// Protected destructor, to discourage deletion outside of Release():
virtual ~ThreadSharedObject() {}
};

Просмотреть файл

@ -190,6 +190,14 @@ TextTrack::ReadyState() const
return mReadyState;
}
void
TextTrack::SetReadyState(uint32_t aReadyState)
{
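// Only forward values that map to a valid TextTrackReadyState; anything
// out of range is silently ignored.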
if (aReadyState <= TextTrackReadyState::FailedToLoad) {
SetReadyState(static_cast<TextTrackReadyState>(aReadyState));
}
}
void
TextTrack::SetReadyState(TextTrackReadyState aState)
{

Просмотреть файл

@ -106,6 +106,7 @@ public:
TextTrackReadyState ReadyState() const;
void SetReadyState(TextTrackReadyState aState);
void SetReadyState(uint32_t aReadyState);
void AddCue(TextTrackCue& aCue);
void RemoveCue(TextTrackCue& aCue, ErrorResult& aRv);

Просмотреть файл

@ -78,7 +78,7 @@ WebVTTListener::LoadResource()
rv = mParserWrapper->Watch(this);
NS_ENSURE_SUCCESS(rv, rv);
mElement->mTrack->SetReadyState(TextTrackReadyState::Loading);
mElement->SetReadyState(TextTrackReadyState::Loading);
return NS_OK;
}
@ -106,13 +106,15 @@ WebVTTListener::OnStopRequest(nsIRequest* aRequest,
nsISupports* aContext,
nsresult aStatus)
{
if (mElement->ReadyState() != TextTrackReadyState::FailedToLoad) {
TextTrack* track = mElement->Track();
track->SetReadyState(TextTrackReadyState::Loaded);
if (NS_FAILED(aStatus)) {
mElement->SetReadyState(TextTrackReadyState::FailedToLoad);
}
// Attempt to parse any final data the parser might still have.
mParserWrapper->Flush();
return NS_OK;
if (mElement->ReadyState() != TextTrackReadyState::FailedToLoad) {
mElement->SetReadyState(TextTrackReadyState::Loaded);
}
return aStatus;
}
NS_METHOD
@ -179,5 +181,16 @@ WebVTTListener::OnRegion(JS::Handle<JS::Value> aRegion, JSContext* aCx)
return NS_OK;
}
NS_IMETHODIMP
WebVTTListener::OnParsingError(int32_t errorCode, JSContext* cx)
{
// We only care about files that have a bad WebVTT file signature right now
// as that means the file failed to load.
if (errorCode == ErrorCodes::BadSignature) {
mElement->SetReadyState(TextTrackReadyState::FailedToLoad);
}
return NS_OK;
}
} // namespace dom
} // namespace mozilla

Просмотреть файл

@ -46,6 +46,10 @@ public:
nsresult LoadResource();
private:
// List of error codes returned from the WebVTT parser that we care about.
enum ErrorCodes {
BadSignature = 0
};
static NS_METHOD ParseChunk(nsIInputStream* aInStream, void* aClosure,
const char* aFromSegment, uint32_t aToOffset,
uint32_t aCount, uint32_t* aWriteCount);

Просмотреть файл

@ -0,0 +1,155 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "AudioMixer.h"
#include <assert.h>
using mozilla::AudioDataValue;
using mozilla::AudioSampleFormat;
/* In this test, the different audio streams and channels are always created to
* cancel each other. */
void MixingDone(AudioDataValue* aData, AudioSampleFormat aFormat, uint32_t aChannels, uint32_t aFrames)
{
bool silent = true;
for (uint32_t i = 0; i < aChannels * aFrames; i++) {
if (aData[i] != 0.0) {
if (aFormat == mozilla::AUDIO_FORMAT_S16) {
fprintf(stderr, "Sample at %d is not silent: %d\n", i, (short)aData[i]);
} else {
fprintf(stderr, "Sample at %d is not silent: %f\n", i, (float)aData[i]);
}
silent = false;
}
}
if (!silent) {
MOZ_CRASH();
}
}
/* Helper functions to give us the maximum and minimum values that don't clip,
* for a given sample format (integer or floating-point). */
template<typename T>
T GetLowValue();
template<typename T>
T GetHighValue();
template<>
float GetLowValue<float>() {
return -1.0;
}
template<>
short GetLowValue<short>() {
return -INT16_MAX;
}
template<>
float GetHighValue<float>() {
return 1.0;
}
template<>
short GetHighValue<short>() {
return INT16_MAX;
}
void FillBuffer(AudioDataValue* aBuffer, uint32_t aLength, AudioDataValue aValue)
{
AudioDataValue* end = aBuffer + aLength;
while (aBuffer != end) {
*aBuffer++ = aValue;
}
}
int main(int argc, char* argv[]) {
const uint32_t CHANNEL_LENGTH = 256;
AudioDataValue a[CHANNEL_LENGTH * 2];
AudioDataValue b[CHANNEL_LENGTH * 2];
FillBuffer(a, CHANNEL_LENGTH, GetLowValue<AudioDataValue>());
FillBuffer(a + CHANNEL_LENGTH, CHANNEL_LENGTH, GetHighValue<AudioDataValue>());
FillBuffer(b, CHANNEL_LENGTH, GetHighValue<AudioDataValue>());
FillBuffer(b + CHANNEL_LENGTH, CHANNEL_LENGTH, GetLowValue<AudioDataValue>());
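// "a" holds (low, high) and "b" holds (high, low) per half-buffer, so mixing
// the two should always produce silence, which MixingDone verifies.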
{
int iterations = 2;
mozilla::AudioMixer mixer(MixingDone);
fprintf(stderr, "Test AudioMixer constant buffer length.\n");
while (iterations--) {
mixer.Mix(a, 2, CHANNEL_LENGTH);
mixer.Mix(b, 2, CHANNEL_LENGTH);
mixer.FinishMixing();
}
}
{
mozilla::AudioMixer mixer(MixingDone);
fprintf(stderr, "Test AudioMixer variable buffer length.\n");
FillBuffer(a, CHANNEL_LENGTH / 2, GetLowValue<AudioDataValue>());
FillBuffer(a + CHANNEL_LENGTH / 2, CHANNEL_LENGTH / 2, GetLowValue<AudioDataValue>());
FillBuffer(b, CHANNEL_LENGTH / 2, GetHighValue<AudioDataValue>());
FillBuffer(b + CHANNEL_LENGTH / 2, CHANNEL_LENGTH / 2, GetHighValue<AudioDataValue>());
mixer.Mix(a, 2, CHANNEL_LENGTH / 2);
mixer.Mix(b, 2, CHANNEL_LENGTH / 2);
mixer.FinishMixing();
FillBuffer(a, CHANNEL_LENGTH, GetLowValue<AudioDataValue>());
FillBuffer(a + CHANNEL_LENGTH, CHANNEL_LENGTH, GetHighValue<AudioDataValue>());
FillBuffer(b, CHANNEL_LENGTH, GetHighValue<AudioDataValue>());
FillBuffer(b + CHANNEL_LENGTH, CHANNEL_LENGTH, GetLowValue<AudioDataValue>());
mixer.Mix(a, 2, CHANNEL_LENGTH);
mixer.Mix(b, 2, CHANNEL_LENGTH);
mixer.FinishMixing();
FillBuffer(a, CHANNEL_LENGTH / 2, GetLowValue<AudioDataValue>());
FillBuffer(a + CHANNEL_LENGTH / 2, CHANNEL_LENGTH / 2, GetLowValue<AudioDataValue>());
FillBuffer(b, CHANNEL_LENGTH / 2, GetHighValue<AudioDataValue>());
FillBuffer(b + CHANNEL_LENGTH / 2, CHANNEL_LENGTH / 2, GetHighValue<AudioDataValue>());
mixer.Mix(a, 2, CHANNEL_LENGTH / 2);
mixer.Mix(b, 2, CHANNEL_LENGTH / 2);
mixer.FinishMixing();
}
FillBuffer(a, CHANNEL_LENGTH, GetLowValue<AudioDataValue>());
FillBuffer(b, CHANNEL_LENGTH, GetHighValue<AudioDataValue>());
{
mozilla::AudioMixer mixer(MixingDone);
fprintf(stderr, "Test AudioMixer variable channel count.\n");
mixer.Mix(a, 1, CHANNEL_LENGTH);
mixer.Mix(b, 1, CHANNEL_LENGTH);
mixer.FinishMixing();
mixer.Mix(a, 1, CHANNEL_LENGTH);
mixer.Mix(b, 1, CHANNEL_LENGTH);
mixer.FinishMixing();
mixer.Mix(a, 1, CHANNEL_LENGTH);
mixer.Mix(b, 1, CHANNEL_LENGTH);
mixer.FinishMixing();
}
{
mozilla::AudioMixer mixer(MixingDone);
fprintf(stderr, "Test AudioMixer variable stream count.\n");
mixer.Mix(a, 2, CHANNEL_LENGTH);
mixer.Mix(b, 2, CHANNEL_LENGTH);
mixer.FinishMixing();
mixer.Mix(a, 2, CHANNEL_LENGTH);
mixer.Mix(b, 2, CHANNEL_LENGTH);
mixer.Mix(a, 2, CHANNEL_LENGTH);
mixer.Mix(b, 2, CHANNEL_LENGTH);
mixer.FinishMixing();
mixer.Mix(a, 2, CHANNEL_LENGTH);
mixer.Mix(b, 2, CHANNEL_LENGTH);
mixer.FinishMixing();
}
return 0;
}

Просмотреть файл

@ -0,0 +1,16 @@
# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
# vim: set filetype=python:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
CPP_UNIT_TESTS += [
'TestAudioMixer.cpp',
]
FAIL_ON_WARNINGS = True
LOCAL_INCLUDES += [
'..',
]

Просмотреть файл

@ -38,7 +38,7 @@ private:
};
// Represent one encoded frame
class EncodedFrame
class EncodedFrame MOZ_FINAL
{
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(EncodedFrame)
public:
@ -90,6 +90,11 @@ public:
FrameType GetFrameType() const { return mFrameType; }
void SetFrameType(FrameType aFrameType) { mFrameType = aFrameType; }
private:
// Private destructor, to discourage deletion outside of Release():
~EncodedFrame()
{
}
// Encoded data
nsTArray<uint8_t> mFrameData;
uint64_t mTimeStamp;

Просмотреть файл

@ -24,9 +24,12 @@ public:
METADATA_AMR,
METADATA_UNKNOWN // Metadata Kind not set
};
virtual ~TrackMetadataBase() {}
// Return the specific metadata kind
virtual MetadataKind GetKind() const = 0;
protected:
// Protected destructor, to discourage deletion outside of Release():
virtual ~TrackMetadataBase() {}
};
// The base class for audio metadata.

Просмотреть файл

@ -12,6 +12,8 @@ PARALLEL_DIRS += [
'webvtt'
]
TEST_TOOL_DIRS += ['compiledtest']
if CONFIG['MOZ_RAW']:
PARALLEL_DIRS += ['raw']
@ -58,6 +60,7 @@ EXPORTS += [
'AudioChannelFormat.h',
'AudioCompactor.h',
'AudioEventTimeline.h',
'AudioMixer.h',
'AudioNodeEngine.h',
'AudioNodeExternalInputStream.h',
'AudioNodeStream.h',

Просмотреть файл

@ -0,0 +1 @@
WEB

Просмотреть файл

@ -41,6 +41,7 @@ support-files =
badtags.ogg
badtags.ogg^headers^
basic.vtt
bad-signature.vtt
beta-phrasebook.ogg
beta-phrasebook.ogg^headers^
big-buck-bunny-unseekable.mp4
@ -440,6 +441,7 @@ skip-if = buildapp == 'b2g' # b2g(bug 901102) b2g-debug(bug 901102) b2g-desktop(
[test_streams_gc.html]
skip-if = buildapp == 'b2g' # b2g(Value being assigned to HTMLMediaElement.currentTime is not a finite floating-point value) b2g-debug(Value being assigned to HTMLMediaElement.currentTime is not a finite floating-point value) b2g-desktop(Value being assigned to HTMLMediaElement.currentTime is not a finite floating-point value)
[test_streams_tracks.html]
[test_trackelementevent.html]
[test_texttrack.html]
[test_texttrackcue.html]
[test_trackevent.html]

Просмотреть файл

@ -16,7 +16,8 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=881976
<pre id="test">
<script class="testbody" type="text/javascript">
SimpleTest.waitForExplicitFinish();
SpecialPowers.pushPrefEnv({"set": [["media.webvtt.enabled", true]]},
SpecialPowers.pushPrefEnv({"set": [["media.webvtt.enabled", true],
["media.webvtt.regions.enabled", true]]},
function() {
var video = document.createElement("video");

Просмотреть файл

@ -0,0 +1,62 @@
<!DOCTYPE HTML>
<html>
<head>
<title>Test for Bug 882677 - Implement the 'sourcing out of band text tracks' algorithm</title>
<script type="text/javascript" src="/MochiKit/MochiKit.js"></script>
<script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
</head>
<body>
<p id="display"></p>
<div id="content" style="display: none">
</div>
<pre id="test">
<script class="testbody" type="text/javascript">
SimpleTest.waitForExplicitFinish();
SpecialPowers.pushPrefEnv({"set": [["media.webvtt.enabled", true],
["media.webvtt.regions.enabled", true]]},
function() {
var video = document.createElement("video");
video.src = "seek.webm";
video.preload = "auto";
var trackOne = document.createElement("track");
trackOne.src = "basic.vtt";
trackOne.kind = "subtitles";
var trackTwo = document.createElement("track");
trackTwo.src = "bad-signature.vtt";
trackTwo.kind = "captions";
var trackThree = document.createElement("track");
trackThree.src = "bad.vtt";
trackThree.kind = "chapters";
var events = 0;
function countEvent() {
ok(true, "A loaded or error event should have happened.");
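// Each of the three tracks fires exactly one loaded or error event; finish
// once all three have been counted.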
events++ && events == 3 && SimpleTest.finish();
}
function shouldNotBeCalled() {
ok(false, "Event should not have been called.");
}
trackOne.addEventListener("loaded", countEvent);
trackOne.addEventListener("error", shouldNotBeCalled)
trackTwo.addEventListener("loaded", shouldNotBeCalled);
trackTwo.addEventListener("error", countEvent);
trackThree.addEventListener("loaded", shouldNotBeCalled);
trackThree.addEventListener("error", countEvent);
document.getElementById("content").appendChild(video);
video.appendChild(trackOne);
video.appendChild(trackTwo);
video.appendChild(trackThree);
}
);
</script>
</pre>
</body>
</html>

Просмотреть файл

@ -90,5 +90,25 @@ WebAudioUtils::SpeexResamplerProcess(SpeexResamplerState* aResampler,
#endif
}
int
WebAudioUtils::SpeexResamplerProcess(SpeexResamplerState* aResampler,
uint32_t aChannel,
const int16_t* aIn, uint32_t* aInLen,
int16_t* aOut, uint32_t* aOutLen)
{
#ifdef MOZ_SAMPLE_TYPE_S16
return speex_resampler_process_int(aResampler, aChannel, aIn, aInLen, aOut, aOutLen);
#else
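// Native samples are floats here: widen the int16 input to float, resample in
// the float domain, then narrow the result back to int16.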
nsAutoTArray<AudioDataValue, WEBAUDIO_BLOCK_SIZE*4> tmp1;
nsAutoTArray<AudioDataValue, WEBAUDIO_BLOCK_SIZE*4> tmp2;
tmp1.SetLength(*aInLen);
tmp2.SetLength(*aOutLen);
ConvertAudioSamples(aIn, tmp1.Elements(), *aInLen);
int result = speex_resampler_process_float(aResampler, aChannel, tmp1.Elements(), aInLen, tmp2.Elements(), aOutLen);
ConvertAudioSamples(tmp2.Elements(), aOut, *aOutLen);
return result;
#endif
}
}
}

Просмотреть файл

@ -19,7 +19,6 @@ typedef struct SpeexResamplerState_ SpeexResamplerState;
namespace mozilla {
class AudioNodeStream;
class MediaStream;
namespace dom {
@ -210,7 +209,13 @@ struct WebAudioUtils {
uint32_t aChannel,
const int16_t* aIn, uint32_t* aInLen,
float* aOut, uint32_t* aOutLen);
};
static int
SpeexResamplerProcess(SpeexResamplerState* aResampler,
uint32_t aChannel,
const int16_t* aIn, uint32_t* aInLen,
int16_t* aOut, uint32_t* aOutLen);
};
}
}

Просмотреть файл

@ -182,7 +182,7 @@ private:
uint32_t mSkipBytes;
};
class WebMBufferedState
class WebMBufferedState MOZ_FINAL
{
NS_INLINE_DECL_REFCOUNTING(WebMBufferedState)
@ -191,15 +191,16 @@ public:
MOZ_COUNT_CTOR(WebMBufferedState);
}
~WebMBufferedState() {
MOZ_COUNT_DTOR(WebMBufferedState);
}
void NotifyDataArrived(const char* aBuffer, uint32_t aLength, int64_t aOffset);
bool CalculateBufferedForRange(int64_t aStartOffset, int64_t aEndOffset,
uint64_t* aStartTime, uint64_t* aEndTime);
private:
// Private destructor, to discourage deletion outside of Release():
~WebMBufferedState() {
MOZ_COUNT_DTOR(WebMBufferedState);
}
// Synchronizes access to the mTimeMapping array.
ReentrantMonitor mReentrantMonitor;

Просмотреть файл

@ -0,0 +1,55 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef AUDIOOUTPUTOBSERVER_H_
#define AUDIOOUTPUTOBSERVER_H_
#include "mozilla/StaticPtr.h"
namespace webrtc {
class SingleRwFifo;
}
namespace mozilla {
typedef struct FarEndAudioChunk_ {
uint16_t mSamples;
bool mOverrun;
int16_t mData[1]; // variable-length
} FarEndAudioChunk;
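// FarEndAudioChunk is over-allocated (see InsertFarEnd) so that mData[1]
// serves as a variable-length sample buffer.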
// XXX Really a singleton currently
class AudioOutputObserver // : public MSGOutputObserver
{
public:
AudioOutputObserver();
virtual ~AudioOutputObserver();
void Clear();
void InsertFarEnd(const AudioDataValue *aBuffer, uint32_t aSamples, bool aOverran,
int aFreq, int aChannels, AudioSampleFormat aFormat);
uint32_t PlayoutFrequency() { return mPlayoutFreq; }
uint32_t PlayoutChannels() { return mPlayoutChannels; }
FarEndAudioChunk *Pop();
uint32_t Size();
private:
uint32_t mPlayoutFreq;
uint32_t mPlayoutChannels;
nsAutoPtr<webrtc::SingleRwFifo> mPlayoutFifo;
uint32_t mChunkSize;
// chunking to 10ms support
nsAutoPtr<FarEndAudioChunk> mSaved;
uint32_t mSamplesSaved;
};
// XXX until there's a registration API in MSG
extern StaticAutoPtr<AudioOutputObserver> gFarendObserver;
}
#endif

Просмотреть файл

@ -101,7 +101,8 @@ public:
/* Change device configuration. */
virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise) = 0;
bool aNoiseOn, uint32_t aNoise,
int32_t aPlayoutDelay) = 0;
/* Returns true if a source represents a fake capture device and
* false otherwise

Просмотреть файл

@ -48,7 +48,8 @@ public:
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise) { return NS_OK; };
bool aNoiseOn, uint32_t aNoise,
int32_t aPlayoutDelay) { return NS_OK; };
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,
@ -100,7 +101,8 @@ public:
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise) { return NS_OK; };
bool aNoiseOn, uint32_t aNoise,
int32_t aPlayoutDelay) { return NS_OK; };
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,

Просмотреть файл

@ -279,7 +279,7 @@ MediaEngineTabVideoSource::Stop(mozilla::SourceMediaStream*, mozilla::TrackID)
}
nsresult
MediaEngineTabVideoSource::Config(bool, uint32_t, bool, uint32_t, bool, uint32_t)
MediaEngineTabVideoSource::Config(bool, uint32_t, bool, uint32_t, bool, uint32_t, int32_t)
{
return NS_OK;
}

Просмотреть файл

@ -26,7 +26,7 @@ class MediaEngineTabVideoSource : public MediaEngineVideoSource, nsIDOMEventList
virtual nsresult Snapshot(uint32_t, nsIDOMFile**);
virtual void NotifyPull(mozilla::MediaStreamGraph*, mozilla::SourceMediaStream*, mozilla::TrackID, mozilla::StreamTime, mozilla::TrackTicks&);
virtual nsresult Stop(mozilla::SourceMediaStream*, mozilla::TrackID);
virtual nsresult Config(bool, uint32_t, bool, uint32_t, bool, uint32_t);
virtual nsresult Config(bool, uint32_t, bool, uint32_t, bool, uint32_t, int32_t);
virtual bool IsFake();
void Draw();

Просмотреть файл

@ -60,6 +60,8 @@ MediaEngineWebRTC::MediaEngineWebRTC(MediaEnginePrefs &aPrefs)
#else
AsyncLatencyLogger::Get()->AddRef();
#endif
// XXX
gFarendObserver = new AudioOutputObserver();
}
void

Просмотреть файл

@ -40,6 +40,7 @@
#include "webrtc/voice_engine/include/voe_volume_control.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_audio_processing.h"
#include "webrtc/voice_engine/include/voe_call_report.h"
// Video Engine
#include "webrtc/video_engine/include/vie_base.h"
@ -56,6 +57,7 @@
#endif
#include "NullTransport.h"
#include "AudioOutputObserver.h"
namespace mozilla {
@ -147,7 +149,8 @@ public:
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise) { return NS_OK; };
bool aNoiseOn, uint32_t aNoise,
int32_t aPlayoutDelay) { return NS_OK; };
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,
@ -258,10 +261,13 @@ public:
, mCapIndex(aIndex)
, mChannel(-1)
, mInitDone(false)
, mStarted(false)
, mSamples(0)
, mEchoOn(false), mAgcOn(false), mNoiseOn(false)
, mEchoCancel(webrtc::kEcDefault)
, mAGC(webrtc::kAgcDefault)
, mNoiseSuppress(webrtc::kNsDefault)
, mPlayoutDelay(0)
, mNullTransport(nullptr) {
MOZ_ASSERT(aVoiceEnginePtr);
mState = kReleased;
@ -281,7 +287,8 @@ public:
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise);
bool aNoiseOn, uint32_t aNoise,
int32_t aPlayoutDelay);
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
@ -312,6 +319,7 @@ private:
ScopedCustomReleasePtr<webrtc::VoEExternalMedia> mVoERender;
ScopedCustomReleasePtr<webrtc::VoENetwork> mVoENetwork;
ScopedCustomReleasePtr<webrtc::VoEAudioProcessing> mVoEProcessing;
ScopedCustomReleasePtr<webrtc::VoECallReport> mVoECallReport;
// mMonitor protects mSources[] access/changes, and transitions of mState
// from kStarted to kStopped (which are combined with EndTrack()).
@ -323,6 +331,8 @@ private:
int mChannel;
TrackID mTrackID;
bool mInitDone;
bool mStarted;
int mSamples; // int to avoid conversions when comparing/etc to samplingFreq & length
nsString mDeviceName;
nsString mDeviceUUID;
@ -331,6 +341,7 @@ private:
webrtc::EcModes mEchoCancel;
webrtc::AgcModes mAGC;
webrtc::NsModes mNoiseSuppress;
int32_t mPlayoutDelay;
NullTransport *mNullTransport;
};
@ -344,6 +355,8 @@ public:
#ifdef MOZ_B2G_CAMERA
AsyncLatencyLogger::Get()->Release();
#endif
// XXX
gFarendObserver = nullptr;
}
// Clients should ensure to clean-up sources video/audio sources

Просмотреть файл

@ -3,6 +3,15 @@
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "MediaEngineWebRTC.h"
#include <stdio.h>
#include <algorithm>
#include "mozilla/Assertions.h"
// scoped_ptr.h uses FF
#ifdef FF
#undef FF
#endif
#include "webrtc/modules/audio_device/opensl/single_rw_fifo.h"
#define CHANNELS 1
#define ENCODING "L16"
@ -12,6 +21,13 @@
#define SAMPLE_FREQUENCY 16000
#define SAMPLE_LENGTH ((SAMPLE_FREQUENCY*10)/1000)
// These are restrictions from the webrtc.org code
#define MAX_CHANNELS 2
#define MAX_SAMPLING_FREQ 48000 // Hz - multiple of 100
#define MAX_AEC_FIFO_DEPTH 200 // ms - multiple of 10
static_assert(!(MAX_AEC_FIFO_DEPTH % 10), "Invalid MAX_AEC_FIFO_DEPTH");
namespace mozilla {
#ifdef LOG
@ -30,6 +46,117 @@ extern PRLogModuleInfo* GetMediaManagerLog();
*/
NS_IMPL_ISUPPORTS0(MediaEngineWebRTCAudioSource)
// XXX temp until MSG supports registration
StaticAutoPtr<AudioOutputObserver> gFarendObserver;
AudioOutputObserver::AudioOutputObserver()
: mPlayoutFreq(0)
, mPlayoutChannels(0)
, mChunkSize(0)
, mSamplesSaved(0)
{
// Buffers of 10ms chunks
mPlayoutFifo = new webrtc::SingleRwFifo(MAX_AEC_FIFO_DEPTH/10);
}
AudioOutputObserver::~AudioOutputObserver()
{
}
void
AudioOutputObserver::Clear()
{
while (mPlayoutFifo->size() > 0) {
(void) mPlayoutFifo->Pop();
}
}
FarEndAudioChunk *
AudioOutputObserver::Pop()
{
return (FarEndAudioChunk *) mPlayoutFifo->Pop();
}
uint32_t
AudioOutputObserver::Size()
{
return mPlayoutFifo->size();
}
// static
void
AudioOutputObserver::InsertFarEnd(const AudioDataValue *aBuffer, uint32_t aSamples, bool aOverran,
int aFreq, int aChannels, AudioSampleFormat aFormat)
{
if (mPlayoutChannels != 0) {
if (mPlayoutChannels != static_cast<uint32_t>(aChannels)) {
MOZ_CRASH();
}
} else {
MOZ_ASSERT(aChannels <= MAX_CHANNELS);
mPlayoutChannels = static_cast<uint32_t>(aChannels);
}
if (mPlayoutFreq != 0) {
if (mPlayoutFreq != static_cast<uint32_t>(aFreq)) {
MOZ_CRASH();
}
} else {
MOZ_ASSERT(aFreq <= MAX_SAMPLING_FREQ);
MOZ_ASSERT(!(aFreq % 100), "Sampling rate for far end data should be a multiple of 100.");
mPlayoutFreq = aFreq;
mChunkSize = aFreq/100; // 10ms
}
#ifdef LOG_FAREND_INSERTION
static FILE *fp = fopen("insertfarend.pcm","wb");
#endif
if (mSaved) {
// flag overrun as soon as possible, and only once
mSaved->mOverrun = aOverran;
aOverran = false;
}
// Rechunk to 10ms.
// The AnalyzeReverseStream() and WebRtcAec_BufferFarend() functions insist on 10ms
// samples per call. Annoying...
while (aSamples) {
if (!mSaved) {
mSaved = (FarEndAudioChunk *) moz_xmalloc(sizeof(FarEndAudioChunk) +
(mChunkSize * aChannels - 1)*sizeof(int16_t));
mSaved->mSamples = mChunkSize;
mSaved->mOverrun = aOverran;
aOverran = false;
}
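// Copy as many samples as will fit into the partially-filled 10ms chunk.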
uint32_t to_copy = mChunkSize - mSamplesSaved;
if (to_copy > aSamples) {
to_copy = aSamples;
}
int16_t *dest = &(mSaved->mData[mSamplesSaved * aChannels]);
ConvertAudioSamples(aBuffer, dest, to_copy * aChannels);
#ifdef LOG_FAREND_INSERTION
if (fp) {
fwrite(&(mSaved->mData[mSamplesSaved * aChannels]), to_copy * aChannels, sizeof(int16_t), fp);
}
#endif
aSamples -= to_copy;
mSamplesSaved += to_copy;
if (mSamplesSaved >= mChunkSize) {
int free_slots = mPlayoutFifo->capacity() - mPlayoutFifo->size();
if (free_slots <= 0) {
// XXX We should flag an overrun for the reader. We can't drop data from it due to
// thread safety issues.
break;
} else {
mPlayoutFifo->Push((int8_t *) mSaved.forget()); // takes ownership
mSamplesSaved = 0;
}
}
}
}
void
MediaEngineWebRTCAudioSource::GetName(nsAString& aName)
{
@ -53,18 +180,27 @@ MediaEngineWebRTCAudioSource::GetUUID(nsAString& aUUID)
nsresult
MediaEngineWebRTCAudioSource::Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise)
bool aNoiseOn, uint32_t aNoise,
int32_t aPlayoutDelay)
{
LOG(("Audio config: aec: %d, agc: %d, noise: %d",
aEchoOn ? aEcho : -1,
aAgcOn ? aAGC : -1,
aNoiseOn ? aNoise : -1));
bool update_agc = (mAgcOn == aAgcOn);
bool update_noise = (mNoiseOn == aNoiseOn);
bool update_echo = (mEchoOn != aEchoOn);
bool update_agc = (mAgcOn != aAgcOn);
bool update_noise = (mNoiseOn != aNoiseOn);
mEchoOn = aEchoOn;
mAgcOn = aAgcOn;
mNoiseOn = aNoiseOn;
if ((webrtc::EcModes) aEcho != webrtc::kEcUnchanged) {
if (mEchoCancel != (webrtc::EcModes) aEcho) {
update_echo = true;
mEchoCancel = (webrtc::EcModes) aEcho;
}
}
if ((webrtc::AgcModes) aAGC != webrtc::kAgcUnchanged) {
if (mAGC != (webrtc::AgcModes) aAGC) {
update_agc = true;
@ -77,21 +213,21 @@ MediaEngineWebRTCAudioSource::Config(bool aEchoOn, uint32_t aEcho,
mNoiseSuppress = (webrtc::NsModes) aNoise;
}
}
mPlayoutDelay = aPlayoutDelay;
if (mInitDone) {
int error;
#if 0
// Until we can support feeding our full output audio from the browser
// through the MediaStream, this won't work. Or we need to move AEC to
// below audio input and output, perhaps invoked from here.
mEchoOn = aEchoOn;
if ((webrtc::EcModes) aEcho != webrtc::kEcUnchanged)
mEchoCancel = (webrtc::EcModes) aEcho;
mVoEProcessing->SetEcStatus(mEchoOn, aEcho);
#else
(void) aEcho; (void) aEchoOn; (void) mEchoCancel; // suppress warnings
#endif
if (update_echo &&
0 != (error = mVoEProcessing->SetEcStatus(mEchoOn, (webrtc::EcModes) aEcho))) {
LOG(("%s Error setting Echo Status: %d ",__FUNCTION__, error));
// Overhead of capturing all the time is very low (<0.1% of an audio only call)
if (mEchoOn) {
if (0 != (error = mVoEProcessing->SetEcMetricsStatus(true))) {
LOG(("%s Error setting Echo Metrics: %d ",__FUNCTION__, error));
}
}
}
if (update_agc &&
0 != (error = mVoEProcessing->SetAgcStatus(mAgcOn, (webrtc::AgcModes) aAGC))) {
LOG(("%s Error setting AGC Status: %d ",__FUNCTION__, error));
@ -158,6 +294,8 @@ MediaEngineWebRTCAudioSource::Start(SourceMediaStream* aStream, TrackID aID)
AudioSegment* segment = new AudioSegment();
aStream->AddTrack(aID, SAMPLE_FREQUENCY, 0, segment);
aStream->AdvanceKnownTracksTime(STREAM_TIME_MAX);
// XXX Make this based on the pref.
aStream->RegisterForAudioMixing();
LOG(("Start audio for stream %p", aStream));
if (mState == kStarted) {
@ -170,10 +308,16 @@ MediaEngineWebRTCAudioSource::Start(SourceMediaStream* aStream, TrackID aID)
// Make sure logger starts before capture
AsyncLatencyLogger::Get(true);
// Register output observer
// XXX
MOZ_ASSERT(gFarendObserver);
gFarendObserver->Clear();
// Configure audio processing in webrtc code
Config(mEchoOn, webrtc::kEcUnchanged,
mAgcOn, webrtc::kAgcUnchanged,
mNoiseOn, webrtc::kNsUnchanged);
mNoiseOn, webrtc::kNsUnchanged,
mPlayoutDelay);
if (mVoEBase->StartReceive(mChannel)) {
return NS_ERROR_FAILURE;
@ -266,6 +410,11 @@ MediaEngineWebRTCAudioSource::Init()
return;
}
mVoECallReport = webrtc::VoECallReport::GetInterface(mVoiceEngine);
if (!mVoECallReport) {
return;
}
mChannel = mVoEBase->CreateChannel();
if (mChannel < 0) {
return;
@ -362,6 +511,50 @@ MediaEngineWebRTCAudioSource::Process(int channel,
webrtc::ProcessingTypes type, sample* audio10ms,
int length, int samplingFreq, bool isStereo)
{
// On initial capture, throw away all far-end data except the most recent sample
// since it's already irrelevant and we want to avoid confusing the AEC far-end
// input code with "old" audio.
if (!mStarted) {
mStarted = true;
while (gFarendObserver->Size() > 1) {
FarEndAudioChunk *buffer = gFarendObserver->Pop(); // only call if size() > 0
free(buffer);
}
}
while (gFarendObserver->Size() > 0) {
FarEndAudioChunk *buffer = gFarendObserver->Pop(); // only call if size() > 0
if (buffer) {
int length = buffer->mSamples;
if (mVoERender->ExternalPlayoutData(buffer->mData,
gFarendObserver->PlayoutFrequency(),
gFarendObserver->PlayoutChannels(),
mPlayoutDelay,
length) == -1) {
return;
}
}
free(buffer);
}
#ifdef PR_LOGGING
mSamples += length;
if (mSamples > samplingFreq) {
mSamples %= samplingFreq; // just in case mSamples >> samplingFreq
if (PR_LOG_TEST(GetMediaManagerLog(), PR_LOG_DEBUG)) {
webrtc::EchoStatistics echo;
mVoECallReport->GetEchoMetricSummary(echo);
#define DUMP_STATVAL(x) (x).min, (x).max, (x).average
LOG(("Echo: ERL: %d/%d/%d, ERLE: %d/%d/%d, RERL: %d/%d/%d, NLP: %d/%d/%d",
DUMP_STATVAL(echo.erl),
DUMP_STATVAL(echo.erle),
DUMP_STATVAL(echo.rerl),
DUMP_STATVAL(echo.a_nlp)));
}
}
#endif
MonitorAutoLock lock(mMonitor);
if (mState != kStarted)
return;

Просмотреть файл

@ -12,7 +12,8 @@ EXPORTS += [
]
if CONFIG['MOZ_WEBRTC']:
EXPORTS += ['LoadManager.h',
EXPORTS += ['AudioOutputObserver.h',
'LoadManager.h',
'LoadManagerFactory.h',
'LoadMonitor.h',
'MediaEngineWebRTC.h']

Просмотреть файл

@ -67,8 +67,12 @@ namespace dom {
// VoiceData
class VoiceData
class VoiceData MOZ_FINAL
{
private:
// Private destructor, to discourage deletion outside of Release():
~VoiceData() {}
public:
VoiceData(nsISpeechService* aService, const nsAString& aUri,
const nsAString& aName, const nsAString& aLang, bool aIsLocal)
@ -78,8 +82,6 @@ public:
, mLang(aLang)
, mIsLocal(aIsLocal) {}
~VoiceData() {}
NS_INLINE_DECL_REFCOUNTING(VoiceData)
nsCOMPtr<nsISpeechService> mService;

Просмотреть файл

@ -19,7 +19,7 @@ WebVTTParserWrapper.prototype =
{
loadParser: function(window)
{
this.parser = new WebVTTParser(window, new TextDecoder("utf8"));
this.parser = new WebVTT.Parser(window, new TextDecoder("utf8"));
},
parse: function(data)
@ -43,19 +43,23 @@ WebVTTParserWrapper.prototype =
{
this.parser.oncue = callback.onCue;
this.parser.onregion = callback.onRegion;
this.parser.onparsingerror = function(e) {
// Passing just the error code back is enough for our needs.
callback.onParsingError(("code" in e) ? e.code : -1);
};
},
convertCueToDOMTree: function(window, cue)
{
return WebVTTParser.convertCueToDOMTree(window, cue.text);
return WebVTT.convertCueToDOMTree(window, cue.text);
},
processCues: function(window, cues, overlay)
{
WebVTTParser.processCues(window, cues, overlay);
WebVTT.processCues(window, cues, overlay);
},
classDescription: "Wrapper for the JS WebVTTParser (vtt.js)",
classDescription: "Wrapper for the JS WebVTT implementation (vtt.js)",
classID: Components.ID(WEBVTTPARSERWRAPPER_CID),
QueryInterface: XPCOMUtils.generateQI([Ci.nsIWebVTTParserWrapper]),
classInfo: XPCOMUtils.generateCI({

Просмотреть файл

@ -7,7 +7,7 @@
/**
* Listener for a JS WebVTT parser (vtt.js).
*/
[scriptable, uuid(2953cf08-403e-4419-8d20-ce286aac026b)]
[scriptable, uuid(8a2d7780-2045-4a29-99f4-df15cae5fc49)]
interface nsIWebVTTListener : nsISupports
{
/**
@ -26,4 +26,12 @@ interface nsIWebVTTListener : nsISupports
*/
[implicit_jscontext]
void onRegion(in jsval region);
/**
* Is called when the WebVTT parser encounters a parsing error.
*
* @param errorCode The error code of the ParsingError that occurred.
*/
[implicit_jscontext]
void onParsingError(in long errorCode);
};

Просмотреть файл

@ -24,27 +24,21 @@ repo.status(function(err, status) {
}
repo.checkout("master", function() {
repo.commits("master", 1, function(err, commits) {
var vttjs = fs.readFileSync(argv.d + "/vtt.js", 'utf8');
var vttjs = fs.readFileSync(argv.d + "/lib/vtt.js", 'utf8');
// Remove settings for VIM and Emacs.
vttjs = vttjs.replace(/\/\* -\*-.*-\*- \*\/\n/, '');
vttjs = vttjs.replace(/\/\* vim:.* \*\/\n/, '');
// Remove nodejs export statement.
vttjs = vttjs.replace(
'if (typeof module !== "undefined" && module.exports) {\n' +
' module.exports.WebVTTParser = WebVTTParser;\n}\n',
"");
// Concatenate header and vttjs code.
vttjs =
'/* This Source Code Form is subject to the terms of the Mozilla Public\n' +
' * License, v. 2.0. If a copy of the MPL was not distributed with this\n' +
' * file, You can obtain one at http://mozilla.org/MPL/2.0/. */\n\n' +
'this.EXPORTED_SYMBOLS = ["WebVTTParser"];\n\n' +
'this.EXPORTED_SYMBOLS = ["WebVTT"];\n\n' +
'/**\n' +
' * Code below is vtt.js the JS WebVTTParser.\n' +
' * Current source code can be found at http://github.com/andreasgal/vtt.js\n' +
' * Code below is vtt.js the JS WebVTT implementation.\n' +
' * Current source code can be found at http://github.com/mozilla/vtt.js\n' +
' *\n' +
' * Code taken from commit ' + commits[0].id + '\n' +
' */\n' +

Просмотреть файл

@ -2,13 +2,13 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
this.EXPORTED_SYMBOLS = ["WebVTTParser"];
this.EXPORTED_SYMBOLS = ["WebVTT"];
/**
* Code below is vtt.js the JS WebVTTParser.
* Current source code can be found at http://github.com/andreasgal/vtt.js
* Code below is vtt.js the JS WebVTT implementation.
* Current source code can be found at http://github.com/mozilla/vtt.js
*
* Code taken from commit ae06fb75793d3a8171a8c34666c2a3c32b5fde89
* Code taken from commit 2edc263af6003d539eb2ce442d6102e5d8b75fb5
*/
/**
* Copyright 2013 vtt.js Contributors
@ -29,13 +29,41 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
(function(global) {
function ParsingError(message) {
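// Fallback for environments that lack a native Object.create.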
_objCreate = Object.create || (function() {
function F() {}
return function(o) {
if (arguments.length !== 1) {
throw new Error('Object.create shim only accepts one parameter.');
}
F.prototype = o;
return new F();
};
})();
// Creates a new ParserError object from an errorData object. The errorData
// object should have default code and message properties. The default message
// property can be overridden by passing in a message parameter.
// See ParsingError.Errors below for acceptable errors.
function ParsingError(errorData, message) {
this.name = "ParsingError";
this.message = message || "";
this.code = errorData.code;
this.message = message || errorData.message;
}
ParsingError.prototype = Object.create(Error.prototype);
ParsingError.prototype = _objCreate(Error.prototype);
ParsingError.prototype.constructor = ParsingError;
// ParsingError metadata for acceptable ParsingErrors.
ParsingError.Errors = {
BadSignature: {
code: 0,
message: "Malformed WebVTT signature."
},
BadTimeStamp: {
code: 1,
message: "Malformed time stamp."
}
};
// Try to parse input as a time stamp.
function parseTimeStamp(input) {
@ -64,7 +92,7 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
// A settings object holds key/value pairs and will ignore anything but the first
// assignment to a specific key.
function Settings() {
this.values = Object.create(null);
this.values = _objCreate(null);
}
Settings.prototype = {
@ -142,7 +170,7 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
function consumeTimeStamp() {
var ts = parseTimeStamp(input);
if (ts === null) {
throw new ParsingError("Malformed time stamp.");
throw new ParsingError(ParsingError.Errors.BadTimeStamp);
}
// Remove time stamp from input.
input = input.replace(/^[^\sa-zA-Z-]+/, "");
@ -226,7 +254,8 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
cue.startTime = consumeTimeStamp(); // (1) collect cue start time
skipWhitespace();
if (input.substr(0, 3) !== "-->") { // (3) next characters must match "-->"
throw new ParsingError("Malformed time stamp (time stamps must be separated by '-->').");
throw new ParsingError(ParsingError.Errors.BadTimeStamp,
"Malformed time stamp (time stamps must be separated by '-->').");
}
input = input.substr(3);
skipWhitespace();
@ -237,7 +266,7 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
consumeCueSettings(input, cue);
}
const ESCAPE = {
var ESCAPE = {
"&amp;": "&",
"&lt;": "<",
"&gt;": ">",
@ -246,7 +275,7 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
"&nbsp;": "\u00a0"
};
const TAG_NAME = {
var TAG_NAME = {
c: "span",
i: "i",
b: "b",
@ -257,12 +286,12 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
lang: "span"
};
const TAG_ANNOTATION = {
var TAG_ANNOTATION = {
v: "title",
lang: "lang"
};
const NEEDS_PARENT = {
var NEEDS_PARENT = {
rt: "ruby"
};
@ -600,7 +629,8 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
function determineBidi(cueDiv) {
var nodeStack = [],
text = "";
text = "",
charCode;
if (!cueDiv || !cueDiv.childNodes) {
return "ltr";
@ -617,16 +647,17 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
return null;
}
var node = nodeStack.pop();
if (node.textContent) {
var node = nodeStack.pop(),
text = node.textContent || node.innerText;
if (text) {
// TODO: This should match all unicode type B characters (paragraph
// separator characters). See issue #115.
var m = node.textContent.match(/^.*(\n|\r)/);
var m = text.match(/^.*(\n|\r)/);
if (m) {
nodeStack.length = 0;
return m[0];
}
return node.textContent;
return text;
}
if (node.tagName === "ruby") {
return nextTextNode(nodeStack);
@ -640,8 +671,11 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
pushNodes(nodeStack, cueDiv);
while ((text = nextTextNode(nodeStack))) {
for (var i = 0; i < text.length; i++) {
if (strongRTLChars.indexOf(text.charCodeAt(i)) !== -1) {
return "rtl";
charCode = text.charCodeAt(i);
for (var j = 0; j < strongRTLChars.length; j++) {
if (strongRTLChars[j] === charCode) {
return "rtl";
}
}
}
}
@ -675,9 +709,11 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
// div on 'this'.
StyleBox.prototype.applyStyles = function(styles, div) {
div = div || this.div;
Object.keys(styles).forEach(function(style) {
div.style[style] = styles[style];
});
for (var prop in styles) {
if (styles.hasOwnProperty(prop)) {
div.style[prop] = styles[prop];
}
}
};
StyleBox.prototype.formatStyle = function(val, unit) {
@ -767,7 +803,7 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
});
};
}
CueStyleBox.prototype = Object.create(StyleBox.prototype);
CueStyleBox.prototype = _objCreate(StyleBox.prototype);
CueStyleBox.prototype.constructor = CueStyleBox;
// Represents the co-ordinates of an Element in a way that we can easily
@ -1028,16 +1064,12 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
styleBox.move(bestPosition.toCSSCompatValues(containerBox));
}
function WebVTTParser(window, decoder) {
this.window = window;
this.state = "INITIAL";
this.buffer = "";
this.decoder = decoder || new TextDecoder("utf8");
this.regionList = [];
function WebVTT() {
// Nothing
}
// Helper to allow strings to be decoded instead of the default binary utf8 data.
WebVTTParser.StringDecoder = function() {
WebVTT.StringDecoder = function() {
return {
decode: function(data) {
if (!data) {
@ -1046,26 +1078,26 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
if (typeof data !== "string") {
throw new Error("Error - expected string data.");
}
return decodeURIComponent(escape(data));
return decodeURIComponent(encodeURIComponent(data));
}
};
};
WebVTTParser.convertCueToDOMTree = function(window, cuetext) {
WebVTT.convertCueToDOMTree = function(window, cuetext) {
if (!window || !cuetext) {
return null;
}
return parseContent(window, cuetext);
};
const FONT_SIZE_PERCENT = 0.05;
const FONT_STYLE = "sans-serif";
const CUE_BACKGROUND_PADDING = "1.5%";
var FONT_SIZE_PERCENT = 0.05;
var FONT_STYLE = "sans-serif";
var CUE_BACKGROUND_PADDING = "1.5%";
// Runs the processing model over the cues and regions passed to it.
// @param overlay A block level element (usually a div) that the computed cues
// and regions will be placed into.
WebVTTParser.processCues = function(window, cues, overlay) {
WebVTT.processCues = function(window, cues, overlay) {
if (!window || !cues || !overlay) {
return null;
}
@ -1127,7 +1159,24 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
});
};
WebVTTParser.prototype = {
WebVTT.Parser = function(window, decoder) {
this.window = window;
this.state = "INITIAL";
this.buffer = "";
this.decoder = decoder || new TextDecoder("utf8");
this.regionList = [];
};
WebVTT.Parser.prototype = {
// If the error is a ParsingError then report it to the consumer if
// possible. If it's not a ParsingError then throw it like normal.
reportOrThrowError: function(e) {
if (e instanceof ParsingError) {
this.onparsingerror && this.onparsingerror(e);
} else {
throw e;
}
},
parse: function (data) {
var self = this;
@ -1242,7 +1291,7 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
var m = line.match(/^WEBVTT([ \t].*)?$/);
if (!m || !m[0]) {
throw new ParsingError("Malformed WebVTT signature.");
throw new ParsingError(ParsingError.Errors.BadSignature);
}
self.state = "HEADER";
@ -1296,10 +1345,7 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
try {
parseCue(line, self.cue, self.regionList);
} catch (e) {
// If it's not a parsing error then throw it to the consumer.
if (!(e instanceof ParsingError)) {
throw e;
}
self.reportOrThrowError(e);
// In case of an error ignore rest of the cue.
self.cue = null;
self.state = "BADCUE";
@ -1330,11 +1376,9 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
}
}
} catch (e) {
// If it's not a parsing error then throw it to the consumer.
if (!(e instanceof ParsingError)) {
throw e;
}
// If we are currently parsing a cue, report what we have, and then the error.
self.reportOrThrowError(e);
// If we are currently parsing a cue, report what we have.
if (self.state === "CUETEXT" && self.cue && self.oncue) {
self.oncue(self.cue);
}
@ -1347,18 +1391,28 @@ this.EXPORTED_SYMBOLS = ["WebVTTParser"];
},
flush: function () {
var self = this;
// Finish decoding the stream.
self.buffer += self.decoder.decode();
// Synthesize the end of the current cue or region.
if (self.cue || self.state === "HEADER") {
self.buffer += "\n\n";
self.parse();
try {
// Finish decoding the stream.
self.buffer += self.decoder.decode();
// Synthesize the end of the current cue or region.
if (self.cue || self.state === "HEADER") {
self.buffer += "\n\n";
self.parse();
}
// If we've flushed, parsed, and we're still in the INITIAL state then
// that means we don't have enough of the stream to parse the first
// line.
if (self.state === "INITIAL") {
throw new ParsingError(ParsingError.Errors.BadSignature);
}
} catch(e) {
self.reportOrThrowError(e);
}
self.onflush && self.onflush();
return this;
}
};
global.WebVTTParser = WebVTTParser;
global.WebVTT = WebVTT;
}(this));

Просмотреть файл

@ -127,5 +127,6 @@ DOM4_MSG_DEF(ReadOnlyError, "A mutation operation was attempted in a READ_ONLY l
DOM4_MSG_DEF(InvalidStateError, "A mutation operation was attempted on a file storage that did not allow mutations.", NS_ERROR_DOM_FILEHANDLE_NOT_ALLOWED_ERR)
DOM4_MSG_DEF(AbortError, "A request was aborted, for example through a call to LockedFile.abort.", NS_ERROR_DOM_FILEHANDLE_ABORT_ERR)
DOM4_MSG_DEF(QuotaExceededError, "The current locked file exceeded its quota limitations.", NS_ERROR_DOM_FILEHANDLE_QUOTA_ERR)
DOM_MSG_DEF(NS_ERROR_DOM_JS_EXCEPTION, "A callback threw an exception")

Просмотреть файл

@ -3912,12 +3912,11 @@ nsStorage2SH::NewEnumerate(nsIXPConnectWrappedNative *wrapper, JSContext *cx,
if (enum_op == JSENUMERATE_NEXT && keys->Length() != 0) {
nsString& key = keys->ElementAt(0);
JSString *str =
JS_NewUCStringCopyN(cx, key.get(), key.Length());
JS::Rooted<JSString*> str(cx, JS_NewUCStringCopyN(cx, key.get(), key.Length()));
NS_ENSURE_TRUE(str, NS_ERROR_OUT_OF_MEMORY);
JS::Rooted<jsid> id(cx);
JS_ValueToId(cx, JS::StringValue(str), &id);
JS_StringToId(cx, str, &id);
*idp = id;
keys->RemoveElementAt(0);

Просмотреть файл

@ -27,7 +27,7 @@ static const nsDOMPerformanceNavigationType TYPE_RESERVED = 255;
}
}
class nsDOMNavigationTiming
class nsDOMNavigationTiming MOZ_FINAL
{
public:
nsDOMNavigationTiming();

Просмотреть файл

@ -2487,18 +2487,18 @@ nsGlobalWindow::SetNewDocument(nsIDocument* aDocument,
{
JSAutoCompartment ac(cx, mJSObject);
JS_SetParent(cx, mJSObject, newInnerWindow->mJSObject);
JS::Rooted<JSObject*> obj(cx, mJSObject);
JS::Rooted<JSObject*> newParent(cx, newInnerWindow->mJSObject);
JS_SetParent(cx, obj, newParent);
// Inform the nsJSContext, which is the canonical holder of the outer.
mContext->SetWindowProxy(obj);
NS_ASSERTION(!JS_IsExceptionPending(cx),
"We might overwrite a pending exception!");
XPCWrappedNativeScope* scope = xpc::GetObjectScope(mJSObject);
XPCWrappedNativeScope* scope = xpc::GetObjectScope(obj);
if (scope->mWaiverWrapperMap) {
scope->mWaiverWrapperMap->Reparent(cx, newInnerWindow->mJSObject);
scope->mWaiverWrapperMap->Reparent(cx, newParent);
}
}
}

Просмотреть файл

@ -2793,9 +2793,10 @@ NS_DOMWriteStructuredClone(JSContext* cx,
// Write the internals to the stream.
JSAutoCompartment ac(cx, dataArray);
JS::Rooted<JS::Value> arrayValue(cx, JS::ObjectValue(*dataArray));
return JS_WriteUint32Pair(writer, SCTAG_DOM_IMAGEDATA, 0) &&
JS_WriteUint32Pair(writer, width, height) &&
JS_WriteTypedArray(writer, JS::ObjectValue(*dataArray));
JS_WriteTypedArray(writer, arrayValue);
}
void

Просмотреть файл

@ -35,6 +35,9 @@ const BDADDR_ANY = "00:00:00:00:00:00";
const BDADDR_ALL = "ff:ff:ff:ff:ff:ff";
const BDADDR_LOCAL = "ff:ff:ff:00:00:00";
// A user-friendly name for the remote BT device.
const REMOTE_DEVICE_NAME = "Remote BT Device";
let Promise =
SpecialPowers.Cu.import("resource://gre/modules/Promise.jsm").Promise;
@ -77,6 +80,70 @@ function runEmulatorCmdSafe(aCommand) {
return deferred.promise;
}
/**
* Add a Bluetooth remote device to scatternet and set its properties.
*
* Use QEMU command 'bt remote add' to add a virtual Bluetooth remote
* and set its properties by setEmulatorDeviceProperty().
*
* Fulfill params:
* result -- bluetooth address of the remote device.
* Reject params: (none)
*
* @param aProperties
* A javascript object with zero or more properties for initializing
* the remote device. For now, the properties can be 'name' or
* 'discoverable'. It is valid to pass null or a javascript object
* that has no properties at all.
*
* @return A promise object.
*/
function addEmulatorRemoteDevice(aProperties) {
let address;
let promise = runEmulatorCmdSafe("bt remote add")
.then(function(aResults) {
address = aResults[0].toUpperCase();
});
for (let key in aProperties) {
let value = aProperties[key];
let propertyName = key;
promise = promise.then(function() {
return setEmulatorDeviceProperty(address, propertyName, value);
});
}
return promise.then(function() {
return address;
});
}
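// Example (illustrative only):
//   addEmulatorRemoteDevice({ "name": REMOTE_DEVICE_NAME, "discoverable": true })
//     .then(function(aAddress) { log("Added remote device: " + aAddress); });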
/**
* Remove Bluetooth remote devices in scatternet.
*
* Use QEMU command 'bt remote remove <addr>' to remove a specific virtual
* Bluetooth remote device in scatternet or remove them all by QEMU command
* 'bt remote remove BDADDR_ALL'.
*
* @param aAddress
* The Bluetooth address string, in the format xx:xx:xx:xx:xx:xx.
*
* Fulfill params:
* result -- an array of emulator response lines.
* Reject params: (none)
*
* @return A promise object.
*/
function removeEmulatorRemoteDevice(aAddress) {
let cmd = "bt remote remove " + aAddress;
return runEmulatorCmdSafe(cmd)
.then(function(aResults) {
// 'bt remote remove <bd_addr>' returns the removed devices, one per line.
// The last item is "OK".
return aResults.slice(0, -1);
});
}
/**
* Set a property for a Bluetooth device.
*
@ -126,6 +193,71 @@ function getEmulatorDeviceProperty(aAddress, aPropertyName) {
});
}
/**
* Start discovering Bluetooth devices.
*
* Allows the device's adapter to start seeking for remote devices.
*
* Fulfill params: (none)
* Reject params: a DOMError
*
* @param aAdapter
* A BluetoothAdapter which is used to interact with local BT device.
*
* @return A deferred promise.
*/
function startDiscovery(aAdapter) {
let deferred = Promise.defer();
let request = aAdapter.startDiscovery();
request.onsuccess = function () {
log(" Start discovery - Success");
// TODO (bug 892207): Make Bluetooth APIs available for 3rd party apps.
// Currently, the discovering state doesn't change immediately here.
// We will turn this check on once the redesigned APIs have landed.
// is(aAdapter.discovering, true, "BluetoothAdapter.discovering");
deferred.resolve();
}
request.onerror = function (aEvent) {
ok(false, "Start discovery - Fail");
deferred.reject(aEvent.target.error);
}
return deferred.promise;
}
/**
* Stop discovering Bluetooth devices.
*
* Allows the device's adapter to stop seeking for remote devices.
*
* Fulfill params: (none)
* Reject params: a DOMError
*
* @param aAdapter
* A BluetoothAdapter which is used to interact with local BT device.
*
* @return A deferred promise.
*/
function stopDiscovery(aAdapter) {
let deferred = Promise.defer();
let request = aAdapter.stopDiscovery();
request.onsuccess = function () {
log(" Stop discovery - Success");
// TODO (bug 892207): Make Bluetooth APIs available for 3rd party apps.
// Currently, the discovering state doesn't change immediately here.
// We will turn this check on once the redesigned APIs have landed.
// is(aAdapter.discovering, false, "BluetoothAdapter.discovering");
deferred.resolve();
}
request.onerror = function (aEvent) {
ok(false, "Stop discovery - Fail");
deferred.reject(aEvent.target.error);
}
return deferred.promise;
}
/**
* Get mozSettings value specified by @aKey.
*

Просмотреть файл

@ -7,3 +7,4 @@ qemu = true
[test_dom_BluetoothManager_adapteradded.js]
[test_dom_BluetoothAdapter_setters.js]
[test_dom_BluetoothAdapter_getters.js]
[test_dom_BluetoothAdapter_discovery.js]

Просмотреть файл

@ -0,0 +1,47 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* vim: sw=2 ts=2 sts=2 et filetype=javascript
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
///////////////////////////////////////////////////////////////////////////////
// Test Purpose:
// To verify that the discovery process of BluetoothAdapter is correct.
// Use B2G emulator commands to add/remove remote devices to simulate
// discovering behavior.
//
// Test Coverage:
// - BluetoothAdapter.startDiscovery()
// - BluetoothAdapter.stopDiscovery()
// - BluetoothAdapter.ondevicefound()
// - BluetoothAdapter.discovering [Temporarily turned off until BT API update]
//
///////////////////////////////////////////////////////////////////////////////
MARIONETTE_TIMEOUT = 60000;
MARIONETTE_HEAD_JS = 'head.js';
startBluetoothTest(true, function testCaseMain(aAdapter) {
log("Testing the discovery process of BluetoothAdapter ...");
// The properties of remote device.
let theProperties = {
"name": REMOTE_DEVICE_NAME,
"discoverable": true
};
return Promise.resolve()
.then(() => removeEmulatorRemoteDevice(BDADDR_ALL))
.then(() => addEmulatorRemoteDevice(/*theProperties*/ null))
.then(function(aRemoteAddress) {
let promises = [];
promises.push(waitForAdapterEvent(aAdapter, "devicefound"));
promises.push(startDiscovery(aAdapter));
return Promise.all(promises)
.then(function(aResults) {
is(aResults[0].device.address, aRemoteAddress, "BluetoothDevice.address");
});
})
.then(() => stopDiscovery(aAdapter))
.then(() => removeEmulatorRemoteDevice(BDADDR_ALL));
});

Просмотреть файл

@ -15,12 +15,6 @@ function debug(msg) {
// mozbrowser API clients.
docShell.isActive = true;
let infos = sendSyncMessage('browser-element-api:call',
{ 'msg_name': 'hello' })[0];
docShell.QueryInterface(Ci.nsIDocShellTreeItem).name = infos.name;
docShell.setFullscreenAllowed(infos.fullscreenAllowed);
function parentDocShell(docshell) {
if (!docshell) {
return null;
@ -57,3 +51,8 @@ if (!('BrowserElementIsPreloaded' in this)) {
}
var BrowserElementIsReady = true;
let infos = sendSyncMessage('browser-element-api:call',
{ 'msg_name': 'hello' })[0];
docShell.QueryInterface(Ci.nsIDocShellTreeItem).name = infos.name;
docShell.setFullscreenAllowed(infos.fullscreenAllowed);

Просмотреть файл

@ -366,9 +366,11 @@ BrowserElementParent.prototype = {
return true;
},
_recvHello: function(data) {
_recvHello: function() {
debug("recvHello");
this._ready = true;
// Inform our child if our owner element's document is invisible. Note
// that we must do so here, rather than in the BrowserElementParent
// constructor, because the BrowserElementChild may not be initialized when
@ -382,7 +384,7 @@ BrowserElementParent.prototype = {
fullscreenAllowed:
this._frameElement.hasAttribute('allowfullscreen') ||
this._frameElement.hasAttribute('mozallowfullscreen')
}
};
},
_fireCtxMenuEvent: function(data) {
@ -724,15 +726,31 @@ BrowserElementParent.prototype = {
} else {
let reqOld = XPCNativeWrapper.unwrap(activeInputFrame)
.setInputMethodActive(false);
reqOld.onsuccess = function() {
activeInputFrame = null;
this._sendSetInputMethodActiveDOMRequest(req, isActive);
// We want to continue regardless of whether this req succeeded
reqOld.onsuccess = reqOld.onerror = function() {
let setActive = function() {
activeInputFrame = null;
this._sendSetInputMethodActiveDOMRequest(req, isActive);
}.bind(this);
if (this._ready) {
setActive();
return;
}
// Wait for the hello event from BrowserElementChild
let onReady = function(aMsg) {
if (this._isAlive() && (aMsg.data.msg_name === 'hello')) {
setActive();
this._mm.removeMessageListener('browser-element-api:call',
onReady);
}
}.bind(this);
this._mm.addMessageListener('browser-element-api:call', onReady);
}.bind(this);
reqOld.onerror = function() {
Services.DOMRequest.fireErrorAsync(req,
'Failed to deactivate the old input method: ' +
reqOld.error + '.');
};
}
} else {
this._sendSetInputMethodActiveDOMRequest(req, isActive);

Просмотреть файл

@ -22,11 +22,14 @@ public:
MOZ_COUNT_CTOR(CameraControlListener);
}
protected:
// Protected destructor, to discourage deletion outside of Release():
virtual ~CameraControlListener()
{
MOZ_COUNT_DTOR(CameraControlListener);
}
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(CameraControlListener);
enum HardwareState

Просмотреть файл

@ -121,7 +121,7 @@ public:
protected:
virtual ~nsDOMCameraControl();
class DOMCameraConfiguration : public dom::CameraConfiguration
class DOMCameraConfiguration MOZ_FINAL : public dom::CameraConfiguration
{
public:
NS_INLINE_DECL_REFCOUNTING(DOMCameraConfiguration)
@ -133,7 +133,8 @@ protected:
uint32_t mMaxFocusAreas;
uint32_t mMaxMeteringAreas;
protected:
private:
// Private destructor, to discourage deletion outside of Release():
~DOMCameraConfiguration();
};

Просмотреть файл

@ -42,11 +42,6 @@ public:
nsINode* aNode,
WidgetGUIEvent* aEvent);
~TextComposition()
{
// WARNING: mPresContext may be destroying, so, be careful if you touch it.
}
bool Destroyed() const { return !mPresContext; }
nsPresContext* GetPresContext() const { return mPresContext; }
nsINode* GetEventTargetNode() const { return mNode; }
@ -144,6 +139,12 @@ public:
};
private:
// Private destructor, to discourage deletion outside of Release():
~TextComposition()
{
// WARNING: mPresContext may be destroying, so, be careful if you touch it.
}
// This class holds nsPresContext weak. This instance shouldn't block
// destroying it. When the presContext is being destroyed, it's notified to
// IMEStateManager::OnDestroyPresContext(), and then, it destroy

Просмотреть файл

@ -110,7 +110,12 @@ FileHelper::OnStopRequest(nsIRequest* aRequest, nsISupports* aCtxt,
NS_ASSERTION(NS_IsMainThread(), "Wrong thread!");
if (NS_FAILED(aStatus)) {
mResultCode = NS_ERROR_DOM_FILEHANDLE_UNKNOWN_ERR;
if (aStatus == NS_ERROR_FILE_NO_DEVICE_SPACE) {
mResultCode = NS_ERROR_DOM_FILEHANDLE_QUOTA_ERR;
}
else {
mResultCode = NS_ERROR_DOM_FILEHANDLE_UNKNOWN_ERR;
}
}
Finish();

Просмотреть файл

@ -21,7 +21,7 @@ BEGIN_FILE_NAMESPACE
class MetadataHelper;
class MetadataParameters
class MetadataParameters MOZ_FINAL
{
friend class MetadataHelper;
@ -65,6 +65,11 @@ public:
}
private:
// Private destructor, to discourage deletion outside of Release():
~MetadataParameters()
{
}
uint64_t mSize;
int64_t mLastModified;
bool mSizeRequested;

Просмотреть файл

@ -16,16 +16,13 @@ namespace dom {
class FileSystemBase;
class FileSystemRequestParent
class FileSystemRequestParent MOZ_FINAL
: public PFileSystemRequestParent
{
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(FileSystemRequestParent)
public:
FileSystemRequestParent();
virtual
~FileSystemRequestParent();
bool
IsRunning()
{
@ -37,7 +34,12 @@ public:
virtual void
ActorDestroy(ActorDestroyReason why) MOZ_OVERRIDE;
private:
// Private destructor, to discourage deletion outside of Release():
virtual
~FileSystemRequestParent();
nsRefPtr<FileSystemBase> mFileSystem;
};

Просмотреть файл

@ -54,14 +54,17 @@ struct DatabaseInfoGuts
int64_t nextIndexId;
};
struct DatabaseInfo : public DatabaseInfoGuts
struct DatabaseInfo MOZ_FINAL : public DatabaseInfoGuts
{
DatabaseInfo()
: cloned(false)
{ }
private:
// Private destructor, to discourage deletion outside of Release():
~DatabaseInfo();
public:
static bool Get(const nsACString& aId,
DatabaseInfo** aInfo);
@ -142,7 +145,7 @@ struct ObjectStoreInfoGuts
nsTArray<IndexInfo> indexes;
};
struct ObjectStoreInfo : public ObjectStoreInfoGuts
struct ObjectStoreInfo MOZ_FINAL : public ObjectStoreInfoGuts
{
#ifdef NS_BUILD_REFCNT_LOGGING
ObjectStoreInfo();
@ -154,6 +157,7 @@ struct ObjectStoreInfo : public ObjectStoreInfoGuts
ObjectStoreInfo(ObjectStoreInfo& aOther);
private:
// Private destructor, to discourage deletion outside of Release():
#ifdef NS_BUILD_REFCNT_LOGGING
~ObjectStoreInfo();
#else

Просмотреть файл

@ -22,7 +22,7 @@ BEGIN_INDEXEDDB_NAMESPACE
class FileInfo;
class FileManager
class FileManager MOZ_FINAL
{
friend class FileInfo;
@ -38,9 +38,6 @@ public:
mInvalidated(false)
{ }
~FileManager()
{ }
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(FileManager)
PersistenceType Type()
@ -100,6 +97,11 @@ public:
static nsresult GetUsage(nsIFile* aDirectory, uint64_t* aUsage);
private:
// Private destructor, to discourage deletion outside of Release():
~FileManager()
{
}
PersistenceType mPersistenceType;
nsCString mGroup;
nsCString mOrigin;

Просмотреть файл

@ -2991,7 +2991,7 @@ CopyData(nsIInputStream* aInputStream, nsIOutputStream* aOutputStream)
uint32_t numRead;
rv = aInputStream->Read(copyBuffer, sizeof(copyBuffer), &numRead);
IDB_ENSURE_SUCCESS(rv, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR);
NS_ENSURE_SUCCESS(rv, rv);
if (!numRead) {
break;
@ -2999,16 +2999,16 @@ CopyData(nsIInputStream* aInputStream, nsIOutputStream* aOutputStream)
uint32_t numWrite;
rv = aOutputStream->Write(copyBuffer, numRead, &numWrite);
IDB_ENSURE_SUCCESS(rv, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR);
if (numWrite < numRead) {
// Must have hit the quota limit.
return NS_ERROR_DOM_INDEXEDDB_QUOTA_ERR;
if (rv == NS_ERROR_FILE_NO_DEVICE_SPACE) {
rv = NS_ERROR_DOM_INDEXEDDB_QUOTA_ERR;
}
NS_ENSURE_SUCCESS(rv, rv);
NS_ENSURE_TRUE(numWrite == numRead, NS_ERROR_FAILURE);
} while (true);
rv = aOutputStream->Flush();
IDB_ENSURE_SUCCESS(rv, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR);
NS_ENSURE_SUCCESS(rv, rv);
return NS_OK;
}
@ -3251,6 +3251,11 @@ AddHelper::DoDatabaseWork(mozIStorageConnection* aConnection)
IDB_ENSURE_TRUE(outputStream, NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR);
rv = CopyData(inputStream, outputStream);
if (NS_FAILED(rv) &&
NS_ERROR_GET_MODULE(rv) != NS_ERROR_MODULE_DOM_INDEXEDDB) {
IDB_REPORT_INTERNAL_ERR();
rv = NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR;
}
NS_ENSURE_SUCCESS(rv, rv);
cloneFile.mFile->AddFileInfo(fileInfo);
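The CopyData() rework above stops collapsing every stream error into an unknown error: an out-of-space write is translated into a quota error, and a short write becomes a plain failure. The sketch below reproduces that control flow in standalone C++, assuming std::function callbacks in place of nsIInputStream/nsIOutputStream and an invented CopyStatus enum in place of the real nsresult codes; the final Flush() from the real function is omitted.

#include <cstdint>
#include <functional>

// Illustrative status codes standing in for the real nsresult values.
enum class CopyStatus {
  Ok,             // NS_OK
  NoDeviceSpace,  // NS_ERROR_FILE_NO_DEVICE_SPACE
  QuotaExceeded,  // NS_ERROR_DOM_INDEXEDDB_QUOTA_ERR
  Failure         // generic failure
};

using ReadFn  = std::function<CopyStatus(char*, uint32_t, uint32_t*)>;
using WriteFn = std::function<CopyStatus(const char*, uint32_t, uint32_t*)>;

CopyStatus CopyDataSketch(const ReadFn& read, const WriteFn& write) {
  char buffer[4096];
  while (true) {
    uint32_t numRead = 0;
    CopyStatus rv = read(buffer, sizeof(buffer), &numRead);
    if (rv != CopyStatus::Ok) {
      return rv;  // propagate the read error unchanged
    }
    if (numRead == 0) {
      break;  // end of input
    }

    uint32_t numWrite = 0;
    rv = write(buffer, numRead, &numWrite);
    if (rv == CopyStatus::NoDeviceSpace) {
      // Out of space on the destination: surface it as a quota error.
      rv = CopyStatus::QuotaExceeded;
    }
    if (rv != CopyStatus::Ok) {
      return rv;
    }
    if (numWrite != numRead) {
      return CopyStatus::Failure;  // a short write is treated as a failure
    }
  }
  return CopyStatus::Ok;
}

AddHelper::DoDatabaseWork() then leaves results that are already DOM IndexedDB errors untouched, so the quota error produced here reaches content as QuotaExceededError instead of UnknownError, matching the test updates further down.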

Просмотреть файл

@ -40,7 +40,8 @@
let objectStore = trans.objectStore(objectStoreName);
request = objectStore.add(fileData.file, fileData.key);
request.addEventListener("error", new ExpectError("UnknownError", true));
request.addEventListener("error",
new ExpectError("QuotaExceededError", true));
request.onsuccess = unexpectedSuccessHandler;
event = yield undefined;

Просмотреть файл

@ -41,7 +41,8 @@
let blob = getNullBlob(DEFAULT_QUOTA);
request = lockedFile.write(blob);
request.addEventListener("error", new ExpectError("UnknownError", true));
request.addEventListener("error",
new ExpectError("QuotaExceededError", true));
request.onsuccess = unexpectedSuccessHandler;
event = yield undefined;

Просмотреть файл

@ -400,13 +400,30 @@ class nsDOMUserMediaStream : public DOMLocalMediaStream
{
public:
static already_AddRefed<nsDOMUserMediaStream>
CreateTrackUnionStream(nsIDOMWindow* aWindow, uint32_t aHintContents)
CreateTrackUnionStream(nsIDOMWindow* aWindow,
MediaEngineSource *aAudioSource,
MediaEngineSource *aVideoSource)
{
nsRefPtr<nsDOMUserMediaStream> stream = new nsDOMUserMediaStream();
stream->InitTrackUnionStream(aWindow, aHintContents);
DOMMediaStream::TrackTypeHints hints =
(aAudioSource ? DOMMediaStream::HINT_CONTENTS_AUDIO : 0) |
(aVideoSource ? DOMMediaStream::HINT_CONTENTS_VIDEO : 0);
nsRefPtr<nsDOMUserMediaStream> stream = new nsDOMUserMediaStream(aAudioSource);
stream->InitTrackUnionStream(aWindow, hints);
return stream.forget();
}
nsDOMUserMediaStream(MediaEngineSource *aAudioSource) :
mAudioSource(aAudioSource),
mEchoOn(true),
mAgcOn(false),
mNoiseOn(true),
mEcho(webrtc::kEcDefault),
mAgc(webrtc::kAgcDefault),
mNoise(webrtc::kNsDefault),
mPlayoutDelay(20)
{}
virtual ~nsDOMUserMediaStream()
{
Stop();
@ -436,6 +453,21 @@ public:
return false;
}
virtual void
AudioConfig(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAgc,
bool aNoiseOn, uint32_t aNoise,
int32_t aPlayoutDelay)
{
mEchoOn = aEchoOn;
mEcho = aEcho;
mAgcOn = aAgcOn;
mAgc = aAgc;
mNoiseOn = aNoiseOn;
mNoise = aNoise;
mPlayoutDelay = aPlayoutDelay;
}
virtual void RemoveDirectListener(MediaStreamDirectListener *aListener) MOZ_OVERRIDE
{
if (mSourceStream) {
@ -458,6 +490,14 @@ public:
// explicitly destroyed too.
nsRefPtr<SourceMediaStream> mSourceStream;
nsRefPtr<MediaInputPort> mPort;
nsRefPtr<MediaEngineSource> mAudioSource; // so we can turn on AEC
bool mEchoOn;
bool mAgcOn;
bool mNoiseOn;
uint32_t mEcho;
uint32_t mAgc;
uint32_t mNoise;
uint32_t mPlayoutDelay;
};
/**
@ -538,6 +578,12 @@ public:
NS_IMETHOD
Run()
{
int32_t aec = (int32_t) webrtc::kEcUnchanged;
int32_t agc = (int32_t) webrtc::kAgcUnchanged;
int32_t noise = (int32_t) webrtc::kNsUnchanged;
bool aec_on = false, agc_on = false, noise_on = false;
int32_t playout_delay = 0;
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
nsPIDOMWindow *window = static_cast<nsPIDOMWindow*>
(nsGlobalWindow::GetInnerWindowWithId(mWindowID));
@ -550,19 +596,39 @@ public:
return NS_OK;
}
// Create a media stream.
DOMMediaStream::TrackTypeHints hints =
(mAudioSource ? DOMMediaStream::HINT_CONTENTS_AUDIO : 0) |
(mVideoSource ? DOMMediaStream::HINT_CONTENTS_VIDEO : 0);
#ifdef MOZ_WEBRTC
// Right now these configs are only of use if webrtc is available
nsresult rv;
nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv);
if (NS_SUCCEEDED(rv)) {
nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
if (branch) {
branch->GetBoolPref("media.getusermedia.aec_enabled", &aec_on);
branch->GetIntPref("media.getusermedia.aec", &aec);
branch->GetBoolPref("media.getusermedia.agc_enabled", &agc_on);
branch->GetIntPref("media.getusermedia.agc", &agc);
branch->GetBoolPref("media.getusermedia.noise_enabled", &noise_on);
branch->GetIntPref("media.getusermedia.noise", &noise);
branch->GetIntPref("media.getusermedia.playout_delay", &playout_delay);
}
}
#endif
// Create a media stream.
nsRefPtr<nsDOMUserMediaStream> trackunion =
nsDOMUserMediaStream::CreateTrackUnionStream(window, hints);
nsDOMUserMediaStream::CreateTrackUnionStream(window, mAudioSource,
mVideoSource);
if (!trackunion) {
nsCOMPtr<nsIDOMGetUserMediaErrorCallback> error = mError.forget();
LOG(("Returning error for getUserMedia() - no stream"));
error->OnError(NS_LITERAL_STRING("NO_STREAM"));
return NS_OK;
}
trackunion->AudioConfig(aec_on, (uint32_t) aec,
agc_on, (uint32_t) agc,
noise_on, (uint32_t) noise,
playout_delay);
MediaStreamGraph* gm = MediaStreamGraph::GetInstance();
nsRefPtr<SourceMediaStream> stream = gm->CreateSourceStream(nullptr);
@ -592,6 +658,13 @@ public:
TracksAvailableCallback* tracksAvailableCallback =
new TracksAvailableCallback(mManager, mSuccess, mWindowID, trackunion);
#ifdef MOZ_WEBRTC
mListener->AudioConfig(aec_on, (uint32_t) aec,
agc_on, (uint32_t) agc,
noise_on, (uint32_t) noise,
playout_delay);
#endif
// Dispatch to the media thread to ask it to start the sources,
// because that can take a while.
// Pass ownership of trackunion to the MediaOperationRunnable
@ -604,33 +677,6 @@ public:
mError.forget()));
mediaThread->Dispatch(runnable, NS_DISPATCH_NORMAL);
#ifdef MOZ_WEBRTC
// Right now these configs are only of use if webrtc is available
nsresult rv;
nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv);
if (NS_SUCCEEDED(rv)) {
nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
if (branch) {
int32_t aec = (int32_t) webrtc::kEcUnchanged;
int32_t agc = (int32_t) webrtc::kAgcUnchanged;
int32_t noise = (int32_t) webrtc::kNsUnchanged;
bool aec_on = false, agc_on = false, noise_on = false;
branch->GetBoolPref("media.peerconnection.aec_enabled", &aec_on);
branch->GetIntPref("media.peerconnection.aec", &aec);
branch->GetBoolPref("media.peerconnection.agc_enabled", &agc_on);
branch->GetIntPref("media.peerconnection.agc", &agc);
branch->GetBoolPref("media.peerconnection.noise_enabled", &noise_on);
branch->GetIntPref("media.peerconnection.noise", &noise);
mListener->AudioConfig(aec_on, (uint32_t) aec,
agc_on, (uint32_t) agc,
noise_on, (uint32_t) noise);
}
}
#endif
// We won't need mError now.
mError = nullptr;
return NS_OK;
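The MediaManager hunks above move the audio-processing configuration so the preferences are read once, before the track-union stream is created, and the same values (now including a playout delay) are handed to both the stream and the listener. Below is a condensed sketch of just the pref-reading step; the AudioConfigSketch struct and the callback types are inventions for illustration, while the media.getusermedia.* pref names are the ones used in the patch.

#include <cstdint>
#include <functional>
#include <string>

// Hypothetical container for the knobs that AudioConfig() threads through.
struct AudioConfigSketch {
  bool echoOn = false;   int32_t echo = 0;   // stands in for webrtc::kEcUnchanged
  bool agcOn = false;    int32_t agc = 0;    // stands in for webrtc::kAgcUnchanged
  bool noiseOn = false;  int32_t noise = 0;  // stands in for webrtc::kNsUnchanged
  int32_t playoutDelay = 0;
};

using BoolPref = std::function<bool(const std::string&, bool)>;
using IntPref  = std::function<int32_t(const std::string&, int32_t)>;

// Read the media.getusermedia.* prefs once, up front, so the same values can
// later be passed to both the user-media stream and its listener.
AudioConfigSketch ReadGetUserMediaAudioPrefs(const BoolPref& getBool,
                                             const IntPref& getInt) {
  AudioConfigSketch c;
  c.echoOn  = getBool("media.getusermedia.aec_enabled", c.echoOn);
  c.echo    = getInt("media.getusermedia.aec", c.echo);
  c.agcOn   = getBool("media.getusermedia.agc_enabled", c.agcOn);
  c.agc     = getInt("media.getusermedia.agc", c.agc);
  c.noiseOn = getBool("media.getusermedia.noise_enabled", c.noiseOn);
  c.noise   = getInt("media.getusermedia.noise", c.noise);
  c.playoutDelay = getInt("media.getusermedia.playout_delay", c.playoutDelay);
  return c;
}

Compared with the removed block at the end of the hunk, the prefs now come from the media.getusermedia.* branch rather than media.peerconnection.*, gain a playout_delay value, and are captured before the MediaOperationRunnable is dispatched, so trackunion->AudioConfig() and mListener->AudioConfig() see the same settings.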

Просмотреть файл

@ -127,7 +127,8 @@ public:
void
AudioConfig(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise)
bool aNoiseOn, uint32_t aNoise,
int32_t aPlayoutDelay)
{
if (mAudioSource) {
#ifdef MOZ_WEBRTC
@ -135,7 +136,7 @@ public:
RUN_ON_THREAD(mMediaThread,
WrapRunnable(nsRefPtr<MediaEngineSource>(mAudioSource), // threadsafe
&MediaEngineSource::Config,
aEchoOn, aEcho, aAgcOn, aAGC, aNoiseOn, aNoise),
aEchoOn, aEcho, aAgcOn, aAGC, aNoiseOn, aNoise, aPlayoutDelay),
NS_DISPATCH_NORMAL);
#endif
}

Просмотреть файл

@ -50,17 +50,32 @@ function doFailToSetPreferredNetworkType(preferredNetworkType, expectedError, ca
};
}
function getSupportedNetworkTypesFromSystemProperties(clientId, callback) {
let key = "ro.moz.ril." + clientId + ".network_types";
runEmulatorShell(["getprop", key], function(results) {
let result = results[0];
if (!result || result === "") {
// Copied from GECKO_SUPPORTED_NETWORK_TYPES_DEFAULT in dom/system/gonk/ril_consts.js.
result = "wcdma,gsm";
}
callback(result.split(","));
});
}
/* Test supportedNetworkTypes */
taskHelper.push(function testSupportedNetworkTypes() {
let supportedNetworkTypes = mobileConnection.supportedNetworkTypes;
ok(Array.isArray(supportedNetworkTypes), "supportedNetworkTypes should be an array");
ok(supportedNetworkTypes.indexOf("gsm") >= 0, "Should support 'gsm'");
ok(supportedNetworkTypes.indexOf("wcdma") >= 0, "Should support 'wcdma'");
ok(supportedNetworkTypes.indexOf("cdma") >= 0, "Should support 'cdma'");
ok(supportedNetworkTypes.indexOf("evdo") >= 0, "Should support 'evdo'");
taskHelper.runNext();
getSupportedNetworkTypesFromSystemProperties(0, function(testData) {
is(testData.length, supportedNetworkTypes.length);
for (let i = 0; i < testData.length; i++) {
ok(supportedNetworkTypes.indexOf(testData[i]) >= 0, "Should support '" + testData[i] + "'");
}
taskHelper.runNext();
});
});
/* Test switching to supported preferred types */

Просмотреть файл

@ -871,9 +871,10 @@ nsJSObjWrapper::NP_Enumerate(NPObject *npobj, NPIdentifier **idarray,
nsCxPusher pusher;
pusher.Push(cx);
AutoJSExceptionReporter reporter(cx);
JSAutoCompartment ac(cx, npjsobj->mJSObj);
JS::Rooted<JSObject*> jsobj(cx, npjsobj->mJSObj);
JSAutoCompartment ac(cx, jsobj);
JS::AutoIdArray ida(cx, JS_Enumerate(cx, npjsobj->mJSObj));
JS::AutoIdArray ida(cx, JS_Enumerate(cx, jsobj));
if (!ida) {
return false;
}

Просмотреть файл

@ -95,7 +95,7 @@ static bool EnsureGLContext()
return sPluginContext != nullptr;
}
class SharedPluginTexture {
class SharedPluginTexture MOZ_FINAL {
public:
NS_INLINE_DECL_REFCOUNTING(SharedPluginTexture)
@ -103,10 +103,6 @@ public:
{
}
~SharedPluginTexture()
{
}
nsNPAPIPluginInstance::TextureInfo Lock()
{
if (!EnsureGLContext()) {
@ -153,6 +149,11 @@ public:
}
private:
// Private destructor, to discourage deletion outside of Release():
~SharedPluginTexture()
{
}
nsNPAPIPluginInstance::TextureInfo mTextureInfo;
Mutex mLock;

Просмотреть файл

@ -77,7 +77,7 @@ FileQuotaStreamWithWrite<FileStreamBase>::Write(const char* aBuf,
if (!FileQuotaStreamWithWrite::
mQuotaObject->MaybeAllocateMoreSpace(offset, aCount)) {
return NS_ERROR_FAILURE;
return NS_ERROR_FILE_NO_DEVICE_SPACE;
}
}

Просмотреть файл

@ -68,7 +68,7 @@ private:
int64_t mSize;
};
class OriginInfo
class OriginInfo MOZ_FINAL
{
friend class GroupInfo;
friend class QuotaManager;
@ -83,11 +83,6 @@ public:
MOZ_COUNT_CTOR(OriginInfo);
}
~OriginInfo()
{
MOZ_COUNT_DTOR(OriginInfo);
}
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(OriginInfo)
int64_t
@ -97,6 +92,12 @@ public:
}
private:
// Private destructor, to discourage deletion outside of Release():
~OriginInfo()
{
MOZ_COUNT_DTOR(OriginInfo);
}
void
LockedDecreaseUsage(int64_t aSize);
@ -146,7 +147,7 @@ public:
}
};
class GroupInfo
class GroupInfo MOZ_FINAL
{
friend class GroupInfoPair;
friend class OriginInfo;
@ -160,11 +161,6 @@ public:
MOZ_COUNT_CTOR(GroupInfo);
}
~GroupInfo()
{
MOZ_COUNT_DTOR(GroupInfo);
}
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(GroupInfo)
bool
@ -180,6 +176,12 @@ public:
}
private:
// Private destructor, to discourage deletion outside of Release():
~GroupInfo()
{
MOZ_COUNT_DTOR(GroupInfo);
}
already_AddRefed<OriginInfo>
LockedGetOriginInfo(const nsACString& aOrigin);

Просмотреть файл

@ -33,7 +33,7 @@ class nsSMILTimeValueSpec;
// nsSMILInstanceTimes if its begin time changes. This notification is
// performed by the nsSMILInterval.
class nsSMILInstanceTime
class nsSMILInstanceTime MOZ_FINAL
{
public:
// Instance time source. Times generated by events, syncbase relationships,
@ -54,7 +54,7 @@ public:
nsSMILInstanceTimeSource aSource = SOURCE_NONE,
nsSMILTimeValueSpec* aCreator = nullptr,
nsSMILInterval* aBaseInterval = nullptr);
~nsSMILInstanceTime();
void Unlink();
void HandleChangedInterval(const nsSMILTimeContainer* aSrcContainer,
bool aBeginObjectChanged,
@ -99,7 +99,10 @@ public:
NS_INLINE_DECL_REFCOUNTING(nsSMILInstanceTime)
protected:
private:
// Private destructor, to discourage deletion outside of Release():
~nsSMILInstanceTime();
void SetBaseInterval(nsSMILInterval* aBaseInterval);
nsSMILTimeValue mTime;

Просмотреть файл

@ -234,10 +234,12 @@ class DOMStorageUsageBridge
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(DOMStorageUsageBridge)
virtual ~DOMStorageUsageBridge() {}
virtual const nsCString& Scope() = 0;
virtual void LoadUsage(const int64_t aUsage) = 0;
protected:
// Protected destructor, to discourage deletion outside of Release():
virtual ~DOMStorageUsageBridge() {}
};
class DOMStorageUsage : public DOMStorageUsageBridge
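DOMStorageUsageBridge gets the protected-and-virtual variant of the destructor change, since it is an abstract base that is destroyed through base-class pointers. A small hypothetical sketch of why that combination works is below, again assuming explicit AddRef/Release in place of NS_INLINE_DECL_THREADSAFE_REFCOUNTING and made-up class names.

#include <cassert>
#include <cstdint>

// Polymorphic base: the destructor is protected and virtual, so Release()
// can destroy the most-derived object through a base pointer while outside
// code cannot call delete on it.
class UsageBridgeSketch {
public:
  void AddRef() { ++mRefCnt; }
  void Release() {
    assert(mRefCnt > 0);
    if (--mRefCnt == 0) {
      delete this;  // virtual dispatch runs the derived destructor
    }
  }
  virtual void LoadUsage(int64_t aUsage) = 0;

protected:
  // Protected destructor, to discourage deletion outside of Release():
  virtual ~UsageBridgeSketch() = default;

private:
  uint32_t mRefCnt = 0;
};

class UsageSketch final : public UsageBridgeSketch {
public:
  void LoadUsage(int64_t aUsage) override { mUsage = aUsage; }

private:
  ~UsageSketch() override = default;
  int64_t mUsage = 0;
};

int main() {
  UsageBridgeSketch* bridge = new UsageSketch();
  bridge->AddRef();
  bridge->LoadUsage(42);
  // delete bridge;   // would not compile: the base destructor is protected
  bridge->Release();  // the only sanctioned way to tear the object down
  return 0;
}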

Просмотреть файл

@ -430,7 +430,7 @@ this.RIL_PREFERRED_NETWORK_TYPE_TO_GECKO = [
GECKO_PREFERRED_NETWORK_TYPE_LTE_ONLY
];
this.GECKO_SUPPORTED_NETWORK_TYPES_DEFAULT = "gsm,wcdma,cdma,evdo";
this.GECKO_SUPPORTED_NETWORK_TYPES_DEFAULT = "gsm,wcdma";
this.GECKO_SUPPORTED_NETWORK_TYPES = [
"gsm",
"wcdma",

Просмотреть файл

@ -27,6 +27,12 @@ class FileReaderSync MOZ_FINAL
{
NS_INLINE_DECL_REFCOUNTING(FileReaderSync)
private:
// Private destructor, to discourage deletion outside of Release():
~FileReaderSync()
{
}
nsresult ConvertStream(nsIInputStream *aStream, const char *aCharset,
nsAString &aResult);

Просмотреть файл

@ -37,11 +37,6 @@ public:
AssertIsOnMainThread();
}
~URLProxy()
{
MOZ_ASSERT(!mURL);
}
mozilla::dom::URL* URL()
{
return mURL;
@ -59,6 +54,12 @@ public:
}
private:
// Private destructor, to discourage deletion outside of Release():
~URLProxy()
{
MOZ_ASSERT(!mURL);
}
nsRefPtr<mozilla::dom::URL> mURL;
};

Просмотреть файл

@ -426,9 +426,10 @@ struct WorkerStructuredCloneCallbacks
// Write the internals to the stream.
JSAutoCompartment ac(aCx, dataArray);
JS::Rooted<JS::Value> arrayValue(aCx, JS::ObjectValue(*dataArray));
return JS_WriteUint32Pair(aWriter, SCTAG_DOM_IMAGEDATA, 0) &&
JS_WriteUint32Pair(aWriter, width, height) &&
JS_WriteTypedArray(aWriter, JS::ObjectValue(*dataArray));
JS_WriteTypedArray(aWriter, arrayValue);
}
}

Просмотреть файл

@ -381,9 +381,13 @@ private:
nsCOMPtr<nsILoadGroup> mLoadGroup;
nsCOMPtr<nsIPrincipal> mCallerPrincipal;
protected:
// This exists solely to suppress a warning from nsDerivedSafe
txCompileObserver();
// Private destructor, to discourage deletion outside of Release():
~txCompileObserver()
{
}
};
txCompileObserver::txCompileObserver(txMozillaXSLTProcessor* aProcessor,
@ -605,7 +609,12 @@ public:
TX_DECL_ACOMPILEOBSERVER
NS_INLINE_DECL_REFCOUNTING(txSyncCompileObserver)
protected:
private:
// Private destructor, to discourage deletion outside of Release():
~txSyncCompileObserver()
{
}
nsRefPtr<txMozillaXSLTProcessor> mProcessor;
};

Просмотреть файл

@ -22,7 +22,7 @@ class txDecimalFormat;
class txStripSpaceTest;
class txXSLKey;
class txStylesheet
class txStylesheet MOZ_FINAL
{
public:
class ImportFrame;
@ -32,7 +32,6 @@ public:
friend class ImportFrame;
txStylesheet();
~txStylesheet();
nsresult init();
NS_INLINE_DECL_REFCOUNTING(txStylesheet)
@ -107,6 +106,9 @@ public:
};
private:
// Private destructor, to discourage deletion outside of Release():
~txStylesheet();
nsresult addTemplate(txTemplateItem* aTemplate, ImportFrame* aImportFrame);
nsresult addGlobalVariable(txVariableItem* aVariable);
nsresult addFrames(txListIterator& aInsertIter);

Просмотреть файл

@ -181,8 +181,8 @@ struct txStylesheetAttr
nsString mValue;
};
class txStylesheetCompiler : private txStylesheetCompilerState,
public txACompileObserver
class txStylesheetCompiler MOZ_FINAL : private txStylesheetCompilerState,
public txACompileObserver
{
public:
friend class txStylesheetCompilerState;
@ -216,6 +216,11 @@ public:
NS_INLINE_DECL_REFCOUNTING(txStylesheetCompiler)
private:
// Private destructor, to discourage deletion outside of Release():
~txStylesheetCompiler()
{
}
nsresult startElementInternal(int32_t aNamespaceID, nsIAtom* aLocalName,
nsIAtom* aPrefix,
txStylesheetAttr* aAttributes,

Просмотреть файл

@ -28,10 +28,15 @@ class Selection;
*/
// first a helper struct for saving/setting ranges
struct nsRangeStore
struct nsRangeStore MOZ_FINAL
{
nsRangeStore();
private:
// Private destructor, to discourage deletion outside of Release():
~nsRangeStore();
public:
nsresult StoreRange(nsIDOMRange *aRange);
nsresult GetRange(nsRange** outRange);

Some files were not shown because too many files changed in this diff