merge mozilla-inbound to mozilla-central a=merge

This commit is contained in:
Carsten "Tomcat" Book 2015-12-23 12:00:09 +01:00
Родитель 458c416b6d 0fb857d90b
Коммит 719ce6825c
510 изменённых файлов: 21361 добавлений и 12520 удалений

Просмотреть файл

@ -430,7 +430,6 @@ var shell = {
window.addEventListener('sizemodechange', this);
window.addEventListener('unload', this);
this.contentBrowser.addEventListener('mozbrowserloadstart', this, true);
this.contentBrowser.addEventListener('mozbrowserselectionstatechanged', this, true);
this.contentBrowser.addEventListener('mozbrowserscrollviewchange', this, true);
this.contentBrowser.addEventListener('mozbrowsercaretstatechanged', this);
@ -464,7 +463,6 @@ var shell = {
window.removeEventListener('MozApplicationManifest', this);
window.removeEventListener('sizemodechange', this);
this.contentBrowser.removeEventListener('mozbrowserloadstart', this, true);
this.contentBrowser.removeEventListener('mozbrowserselectionstatechanged', this, true);
this.contentBrowser.removeEventListener('mozbrowserscrollviewchange', this, true);
this.contentBrowser.removeEventListener('mozbrowsercaretstatechanged', this);
ppmm.removeMessageListener("content-handler", this);
@ -584,29 +582,6 @@ var shell = {
detail: evt.detail,
});
break;
case 'mozbrowserselectionstatechanged':
// The mozbrowserselectionstatechanged event, may have crossed the chrome-content boundary.
// This event always dispatch to shell.js. But the offset we got from this event is
// based on tab's coordinate. So get the actual offsets between shell and evt.target.
let elt = evt.target;
let win = elt.ownerDocument.defaultView;
let offsetX = win.mozInnerScreenX - window.mozInnerScreenX;
let offsetY = win.mozInnerScreenY - window.mozInnerScreenY;
let rect = elt.getBoundingClientRect();
offsetX += rect.left;
offsetY += rect.top;
let data = evt.detail;
data.offsetX = offsetX;
data.offsetY = offsetY;
DoCommandHelper.setEvent(evt);
shell.sendChromeEvent({
type: 'selectionstatechanged',
detail: data,
});
break;
case 'mozbrowsercaretstatechanged':
{
let elt = evt.target;
@ -883,9 +858,6 @@ var CustomEventManager = {
case 'inputregistry-remove':
KeyboardHelper.handleEvent(detail);
break;
case 'do-command':
DoCommandHelper.handleEvent(detail.cmd);
break;
case 'copypaste-do-command':
Services.obs.notifyObservers({ wrappedJSObject: shell.contentBrowser },
'ask-children-to-execute-copypaste-command', detail.cmd);
@ -933,21 +905,6 @@ var CustomEventManager = {
}
}
var DoCommandHelper = {
_event: null,
setEvent: function docommand_setEvent(evt) {
this._event = evt;
},
handleEvent: function docommand_handleEvent(cmd) {
if (this._event) {
Services.obs.notifyObservers({ wrappedJSObject: this._event.target },
'copypaste-docommand', cmd);
this._event = null;
}
}
}
var WebappsHelper = {
_installers: {},
_count: 0,

Просмотреть файл

@ -813,18 +813,6 @@
@RESPATH@/res/accessiblecaret_tilt_right@1.5x.png
@RESPATH@/res/accessiblecaret_tilt_right@2.25x.png
@RESPATH@/res/accessiblecaret_tilt_right@2x.png
@RESPATH@/res/text_caret.png
@RESPATH@/res/text_caret@1.5x.png
@RESPATH@/res/text_caret@2.25x.png
@RESPATH@/res/text_caret@2x.png
@RESPATH@/res/text_caret_tilt_left.png
@RESPATH@/res/text_caret_tilt_left@1.5x.png
@RESPATH@/res/text_caret_tilt_left@2.25x.png
@RESPATH@/res/text_caret_tilt_left@2x.png
@RESPATH@/res/text_caret_tilt_right.png
@RESPATH@/res/text_caret_tilt_right@1.5x.png
@RESPATH@/res/text_caret_tilt_right@2.25x.png
@RESPATH@/res/text_caret_tilt_right@2x.png
@RESPATH@/res/grabber.gif
#ifdef XP_MACOSX
@RESPATH@/res/cursors/*

Просмотреть файл

@ -451,6 +451,12 @@ support-files =
tags = trackingprotection
support-files =
trackingPage.html
[browser_trackingUI_6.js]
tags = trackingprotection
support-files =
file_trackingUI_6.html
file_trackingUI_6.js
file_trackingUI_6.js^headers^
[browser_trackingUI_telemetry.js]
tags = trackingprotection
support-files =

Просмотреть файл

@ -0,0 +1,46 @@
const URL = "http://mochi.test:8888/browser/browser/base/content/test/general/file_trackingUI_6.html";
function waitForSecurityChange(numChanges = 1) {
return new Promise(resolve => {
let n = 0;
let listener = {
onSecurityChange: function() {
n = n + 1;
info ("Received onSecurityChange event " + n + " of " + numChanges);
if (n >= numChanges) {
gBrowser.removeProgressListener(listener);
resolve();
}
}
};
gBrowser.addProgressListener(listener);
});
}
add_task(function* test_fetch() {
yield new Promise(resolve => {
SpecialPowers.pushPrefEnv({ set: [['privacy.trackingprotection.enabled', true]] },
resolve);
});
yield BrowserTestUtils.withNewTab({ gBrowser, url: URL }, function* (newTabBrowser) {
let securityChange = waitForSecurityChange();
yield ContentTask.spawn(newTabBrowser, null, function* () {
yield content.wrappedJSObject.test_fetch()
.then((response) => { ok(false, "should have denied the request"); })
.catch((e) => { ok(true, `Caught exception: ${e}`); });
});
yield securityChange;
var TrackingProtection = newTabBrowser.ownerGlobal.TrackingProtection;
ok(TrackingProtection, "got TP object");
ok(TrackingProtection.enabled, "TP is enabled");
is(TrackingProtection.content.getAttribute("state"), "blocked-tracking-content",
'content: state="blocked-tracking-content"');
is(TrackingProtection.icon.getAttribute("state"), "blocked-tracking-content",
'icon: state="blocked-tracking-content"');
is(TrackingProtection.icon.getAttribute("tooltiptext"),
gNavigatorBundle.getString("trackingProtection.icon.activeTooltip"), "correct tooltip");
});
});

Просмотреть файл

@ -0,0 +1,16 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Testing the shield from fetch and XHR</title>
</head>
<body>
<p>Hello there!</p>
<script type="application/javascript; version=1.8">
function test_fetch() {
let url = "http://trackertest.org/browser/browser/base/content/test/general/file_trackingUI_6.js";
return fetch(url);
}
</script>
</body>
</html>

Просмотреть файл

@ -0,0 +1,2 @@
/* Some code goes here! */
void 0;

Просмотреть файл

@ -0,0 +1 @@
Access-Control-Allow-Origin: *

Просмотреть файл

@ -754,18 +754,6 @@
@RESPATH@/res/accessiblecaret_tilt_right@1.5x.png
@RESPATH@/res/accessiblecaret_tilt_right@2.25x.png
@RESPATH@/res/accessiblecaret_tilt_right@2x.png
@RESPATH@/res/text_caret.png
@RESPATH@/res/text_caret@1.5x.png
@RESPATH@/res/text_caret@2.25x.png
@RESPATH@/res/text_caret@2x.png
@RESPATH@/res/text_caret_tilt_left.png
@RESPATH@/res/text_caret_tilt_left@1.5x.png
@RESPATH@/res/text_caret_tilt_left@2.25x.png
@RESPATH@/res/text_caret_tilt_left@2x.png
@RESPATH@/res/text_caret_tilt_right.png
@RESPATH@/res/text_caret_tilt_right@1.5x.png
@RESPATH@/res/text_caret_tilt_right@2.25x.png
@RESPATH@/res/text_caret_tilt_right@2x.png
@RESPATH@/res/grabber.gif
#ifdef XP_MACOSX
@RESPATH@/res/cursors/*

Просмотреть файл

@ -43,6 +43,7 @@ fi
if test "$MOZ_ARCH" = "armv6" -a "$OS_TARGET" = "Android"; then
MOZ_FPU=vfp
MOZ_FLOAT_ABI=softfp
fi
MOZ_ARG_WITH_STRING(thumb,

Просмотреть файл

@ -111,13 +111,6 @@ class MachCommands(MachCommandBase):
'--show-possibly-lost=no',
'--track-origins=yes',
'--trace-children=yes',
# The gstreamer plugin scanner can run as part of executing
# firefox, but is an external program. In some weird cases,
# valgrind finds errors while executing __libc_freeres when
# it runs, but those are not relevant, as it's related to
# executing third party code. So don't trace
# gst-plugin-scanner.
'--trace-children-skip=*/gst-plugin-scanner',
'-v', # Enable verbosity to get the list of used suppressions
]

Просмотреть файл

@ -3691,7 +3691,6 @@ MOZ_VORBIS=
MOZ_TREMOR=
MOZ_SAMPLE_TYPE_FLOAT32=
MOZ_SAMPLE_TYPE_S16=
MOZ_GSTREAMER=
MOZ_DIRECTSHOW=
MOZ_WMF=
if test -n "$MOZ_FMP4"; then
@ -5562,69 +5561,6 @@ fi
AC_SUBST(MOZ_PULSEAUDIO)
dnl ========================================================
dnl = Enable GStreamer
dnl ========================================================
case "$OS_TARGET" in
WINNT|Darwin|Android)
;;
*)
MOZ_GSTREAMER=1
GST_API_VERSION=0.10
;;
esac
MOZ_ARG_ENABLE_STRING(gstreamer,
[ --enable-gstreamer[=0.10] Enable GStreamer support],
[ MOZ_GSTREAMER=1
# API version, eg 0.10, 1.0 etc
if test -z "$enableval" -o "$enableval" = "yes"; then
GST_API_VERSION=0.10
elif test "$enableval" = "no"; then
MOZ_GSTREAMER=
else
GST_API_VERSION=$enableval
fi],
)
if test -n "$MOZ_GSTREAMER"; then
# core/base release number
if test "$GST_API_VERSION" = "1.0"; then
GST_VERSION=1.0
else
GST_VERSION=0.10.25
fi
PKG_CHECK_MODULES(GSTREAMER,
gstreamer-$GST_API_VERSION >= $GST_VERSION
gstreamer-app-$GST_API_VERSION
gstreamer-plugins-base-$GST_API_VERSION,
[_HAVE_GSTREAMER=1],
[_HAVE_GSTREAMER=])
if test -z "$_HAVE_GSTREAMER"; then
AC_MSG_ERROR([gstreamer and gstreamer-plugins-base development packages are needed to build gstreamer backend. Install them or disable gstreamer support with --disable-gstreamer])
fi
_SAVE_LDFLAGS=$LDFLAGS
LDFLAGS="$LDFLAGS $GSTREAMER_LIBS -lgstvideo-$GST_API_VERSION"
AC_TRY_LINK(,[return 0;],_HAVE_LIBGSTVIDEO=1,_HAVE_LIBGSTVIDEO=)
if test -n "$_HAVE_LIBGSTVIDEO" ; then
GSTREAMER_LIBS="$GSTREAMER_LIBS -lgstvideo-$GST_API_VERSION"
else
AC_MSG_ERROR([gstreamer-plugins-base found, but no libgstvideo. Something has gone terribly wrong. Try reinstalling gstreamer-plugins-base; failing that, disable the gstreamer backend with --disable-gstreamer.])
fi
LDFLAGS=$_SAVE_LDFLAGS
fi
AC_SUBST(MOZ_GSTREAMER)
AC_SUBST(GST_API_VERSION)
if test -n "$MOZ_GSTREAMER"; then
AC_DEFINE(MOZ_GSTREAMER)
AC_DEFINE_UNQUOTED(GST_API_VERSION, "$GST_API_VERSION")
fi
dnl ========================================================
dnl Permissions System
dnl ========================================================

Просмотреть файл

@ -6,7 +6,11 @@
#include "AudioChannelAgent.h"
#include "AudioChannelService.h"
#include "mozilla/Preferences.h"
#include "nsIAppsService.h"
#include "nsIDocument.h"
#include "nsIDOMWindow.h"
#include "nsIPrincipal.h"
#include "nsPIDOMWindow.h"
#include "nsXULAppAPI.h"
@ -77,6 +81,81 @@ AudioChannelAgent::InitWithWeakCallback(nsIDOMWindow* aWindow,
/* useWeakRef = */ true);
}
nsresult
AudioChannelAgent::FindCorrectWindow(nsIDOMWindow* aWindow)
{
nsCOMPtr<nsPIDOMWindow> window = do_QueryInterface(aWindow);
MOZ_ASSERT(window->IsInnerWindow());
mWindow = window->GetScriptableTop();
if (NS_WARN_IF(!mWindow)) {
return NS_OK;
}
mWindow = mWindow->GetOuterWindow();
if (NS_WARN_IF(!mWindow)) {
return NS_ERROR_FAILURE;
}
// From here we do an hack for nested iframes.
// The system app doesn't have access to the nested iframe objects so it
// cannot control the volume of the agents running in nested apps. What we do
// here is to assign those Agents to the top scriptable window of the parent
// iframe (what is controlled by the system app).
// For doing this we go recursively back into the chain of windows until we
// find apps that are not the system one.
window = mWindow->GetParent();
if (!window || window == mWindow) {
return NS_OK;
}
window = window->GetCurrentInnerWindow();
if (!window) {
return NS_OK;
}
nsCOMPtr<nsIDocument> doc = window->GetExtantDoc();
if (!doc) {
return NS_OK;
}
nsCOMPtr<nsIPrincipal> principal = doc->NodePrincipal();
uint32_t appId;
nsresult rv = principal->GetAppId(&appId);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
if (appId == nsIScriptSecurityManager::NO_APP_ID ||
appId == nsIScriptSecurityManager::UNKNOWN_APP_ID) {
return NS_OK;
}
nsCOMPtr<nsIAppsService> appsService = do_GetService(APPS_SERVICE_CONTRACTID);
if (NS_WARN_IF(!appsService)) {
return NS_ERROR_FAILURE;
}
nsAdoptingString systemAppManifest =
mozilla::Preferences::GetString("b2g.system_manifest_url");
if (!systemAppManifest) {
return NS_OK;
}
uint32_t systemAppId;
rv = appsService->GetAppLocalIdByManifestURL(systemAppManifest, &systemAppId);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
if (systemAppId == appId) {
return NS_OK;
}
return FindCorrectWindow(window);
}
nsresult
AudioChannelAgent::InitInternal(nsIDOMWindow* aWindow, int32_t aChannelType,
nsIAudioChannelAgentCallback *aCallback,
@ -108,18 +187,9 @@ AudioChannelAgent::InitInternal(nsIDOMWindow* aWindow, int32_t aChannelType,
MOZ_ASSERT(pInnerWindow->IsInnerWindow());
mInnerWindowID = pInnerWindow->WindowID();
nsCOMPtr<nsPIDOMWindow> topWindow = pInnerWindow->GetScriptableTop();
if (NS_WARN_IF(!topWindow)) {
return NS_OK;
}
mWindow = do_QueryInterface(topWindow);
if (mWindow) {
mWindow = mWindow->GetOuterWindow();
}
if (NS_WARN_IF(!mWindow)) {
return NS_ERROR_FAILURE;
nsresult rv = FindCorrectWindow(aWindow);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
mAudioChannelType = aChannelType;

Просмотреть файл

@ -56,6 +56,8 @@ private:
void Shutdown();
nsresult FindCorrectWindow(nsIDOMWindow* aWindow);
nsCOMPtr<nsPIDOMWindow> mWindow;
nsCOMPtr<nsIAudioChannelAgentCallback> mCallback;

Просмотреть файл

@ -14,6 +14,7 @@
#include "mozilla/dom/ContentChild.h"
#include "mozilla/dom/ContentParent.h"
#include "mozilla/dom/TabParent.h"
#include "nsContentUtils.h"
#include "nsIScriptSecurityManager.h"
@ -219,6 +220,7 @@ AudioChannelService::Shutdown()
gAudioChannelService->mWindows.Clear();
gAudioChannelService->mPlayingChildren.Clear();
gAudioChannelService->mTabParents.Clear();
#ifdef MOZ_WIDGET_GONK
gAudioChannelService->mSpeakerManager.Clear();
#endif
@ -341,6 +343,21 @@ AudioChannelService::UnregisterAudioChannelAgent(AudioChannelAgent* aAgent,
MaybeSendStatusUpdate();
}
void
AudioChannelService::RegisterTabParent(TabParent* aTabParent)
{
MOZ_ASSERT(aTabParent);
MOZ_ASSERT(!mTabParents.Contains(aTabParent));
mTabParents.AppendElement(aTabParent);
}
void
AudioChannelService::UnregisterTabParent(TabParent* aTabParent)
{
MOZ_ASSERT(aTabParent);
mTabParents.RemoveElement(aTabParent);
}
void
AudioChannelService::GetState(nsPIDOMWindow* aWindow, uint32_t aAudioChannel,
float* aVolume, bool* aMuted)
@ -560,6 +577,32 @@ AudioChannelService::Observe(nsISupports* aSubject, const char* aTopic,
return NS_OK;
}
void
AudioChannelService::RefreshAgentsVolumeAndPropagate(AudioChannel aAudioChannel,
nsPIDOMWindow* aWindow)
{
MOZ_ASSERT(aWindow);
MOZ_ASSERT(aWindow->IsOuterWindow());
nsCOMPtr<nsPIDOMWindow> topWindow = aWindow->GetScriptableTop();
if (!topWindow) {
return;
}
AudioChannelWindow* winData = GetWindowData(topWindow->WindowID());
if (!winData) {
return;
}
for (uint32_t i = 0; i < mTabParents.Length(); ++i) {
mTabParents[i]->AudioChannelChangeNotification(aWindow, aAudioChannel,
winData->mChannels[(uint32_t)aAudioChannel].mVolume,
winData->mChannels[(uint32_t)aAudioChannel].mMuted);
}
RefreshAgentsVolume(aWindow);
}
void
AudioChannelService::RefreshAgentsVolume(nsPIDOMWindow* aWindow)
{
@ -751,7 +794,7 @@ AudioChannelService::SetAudioChannelVolume(nsPIDOMWindow* aWindow,
AudioChannelWindow* winData = GetOrCreateWindowData(aWindow);
winData->mChannels[(uint32_t)aAudioChannel].mVolume = aVolume;
RefreshAgentsVolume(aWindow);
RefreshAgentsVolumeAndPropagate(aAudioChannel, aWindow);
}
NS_IMETHODIMP
@ -814,7 +857,7 @@ AudioChannelService::SetAudioChannelMuted(nsPIDOMWindow* aWindow,
AudioChannelWindow* winData = GetOrCreateWindowData(aWindow);
winData->mChannels[(uint32_t)aAudioChannel].mMuted = aMuted;
RefreshAgentsVolume(aWindow);
RefreshAgentsVolumeAndPropagate(aAudioChannel, aWindow);
}
NS_IMETHODIMP

Просмотреть файл

@ -23,10 +23,13 @@ struct PRLogModuleInfo;
namespace mozilla {
namespace dom {
#ifdef MOZ_WIDGET_GONK
class SpeakerManagerService;
#endif
class TabParent;
#define NUMBER_OF_AUDIO_CHANNELS (uint32_t)AudioChannel::EndGuard_
class AudioChannelService final : public nsIAudioChannelService
@ -63,6 +66,12 @@ public:
void UnregisterAudioChannelAgent(AudioChannelAgent* aAgent,
uint32_t aNotifyPlayback);
/**
* For nested iframes.
*/
void RegisterTabParent(TabParent* aTabParent);
void UnregisterTabParent(TabParent* aTabParent);
/**
* Return the state to indicate this audioChannel for his window should keep
* playing/muted.
@ -108,6 +117,9 @@ public:
void RefreshAgentsVolume(nsPIDOMWindow* aWindow);
void RefreshAgentsVolumeAndPropagate(AudioChannel aAudioChannel,
nsPIDOMWindow* aWindow);
// This method needs to know the inner window that wants to capture audio. We
// group agents per top outer window, but we can have multiple innerWindow per
// top outerWindow (subiframes, etc.) and we have to identify all the agents
@ -223,6 +235,9 @@ private:
nsTArray<SpeakerManagerService*> mSpeakerManager;
#endif
// Raw pointers because TabParents must unregister themselves.
nsTArray<TabParent*> mTabParents;
nsCOMPtr<nsIRunnable> mRunnable;
uint64_t mDefChannelChildID;

Просмотреть файл

@ -1910,9 +1910,11 @@ WebSocket::CreateAndDispatchCloseEvent(bool aWasClean,
MOZ_ASSERT(mImpl);
AssertIsOnTargetThread();
mImpl->mService->WebSocketClosed(mImpl->mChannel->Serial(),
mImpl->mInnerWindowID,
aWasClean, aCode, aReason);
if (mImpl->mChannel) {
mImpl->mService->WebSocketClosed(mImpl->mChannel->Serial(),
mImpl->mInnerWindowID,
aWasClean, aCode, aReason);
}
nsresult rv = CheckInnerWindowCorrectness();
if (NS_FAILED(rv)) {

Просмотреть файл

@ -41,7 +41,6 @@
#include "nsIObjectFrame.h"
#include "nsBindingManager.h"
#include "nsStyleCoord.h"
#include "SelectionCarets.h"
#include "TabChild.h"
#include "nsFrameLoader.h"
@ -1692,11 +1691,6 @@ nsFocusManager::Blur(nsPIDOMWindow* aWindowToClear,
SetCaretVisible(presShell, false, nullptr);
}
RefPtr<SelectionCarets> selectionCarets = presShell->GetSelectionCarets();
if (selectionCarets) {
selectionCarets->NotifyBlur(aIsLeavingDocument || !mActiveWindow);
}
RefPtr<AccessibleCaretEventHub> eventHub = presShell->GetAccessibleCaretEventHub();
if (eventHub) {
eventHub->NotifyBlur(aIsLeavingDocument || !mActiveWindow);

Просмотреть файл

@ -207,7 +207,6 @@
#include "prrng.h"
#include "nsSandboxFlags.h"
#include "TimeChangeObserver.h"
#include "TouchCaret.h"
#include "mozilla/dom/AudioContext.h"
#include "mozilla/dom/BrowserElementDictionariesBinding.h"
#include "mozilla/dom/cache/CacheStorage.h"

Просмотреть файл

@ -5,6 +5,7 @@
#include "nsISupports.idl"
interface mozIApplication;
interface nsFrameLoader;
interface nsIDocShell;
interface nsIURI;
@ -214,7 +215,7 @@ class nsFrameLoader;
native alreadyAddRefed_nsFrameLoader(already_AddRefed<nsFrameLoader>);
[scriptable, uuid(c4abebcf-55f3-47d4-af15-151311971255)]
[scriptable, uuid(adc1b3ba-8deb-4943-8045-e6de0044f2ce)]
interface nsIFrameLoaderOwner : nsISupports
{
/**
@ -223,6 +224,12 @@ interface nsIFrameLoaderOwner : nsISupports
readonly attribute nsIFrameLoader frameLoader;
[noscript, notxpcom] alreadyAddRefed_nsFrameLoader GetFrameLoader();
/**
* The principal of parent mozIApplication in case of nested mozbrowser
* iframes.
*/
readonly attribute mozIApplication parentApplication;
/**
* Puts the FrameLoaderOwner in prerendering mode.
*/

Просмотреть файл

@ -1215,6 +1215,17 @@ nsObjectLoadingContent::GetFrameLoader()
return loader.forget();
}
NS_IMETHODIMP
nsObjectLoadingContent::GetParentApplication(mozIApplication** aApplication)
{
if (!aApplication) {
return NS_ERROR_FAILURE;
}
*aApplication = nullptr;
return NS_OK;
}
NS_IMETHODIMP
nsObjectLoadingContent::SetIsPrerendered()
{

Просмотреть файл

@ -7,6 +7,8 @@
#include "mozilla/Services.h"
#include "mozilla/dom/BrowserElementAudioChannelBinding.h"
#include "mozilla/dom/DOMRequest.h"
#include "mozilla/dom/Element.h"
#include "mozilla/dom/TabParent.h"
#include "mozilla/dom/Promise.h"
#include "mozilla/dom/PromiseNativeHandler.h"
#include "mozilla/dom/ToJSValue.h"
@ -23,16 +25,6 @@
#include "nsPIDOMWindow.h"
#include "nsServiceManagerUtils.h"
namespace {
void
AssertIsInMainProcess()
{
MOZ_ASSERT(XRE_GetProcessType() == GeckoProcessType_Default);
}
} // anonymous namespace
namespace mozilla {
namespace dom {
@ -89,7 +81,6 @@ BrowserElementAudioChannel::BrowserElementAudioChannel(
, mState(eStateUnknown)
{
MOZ_ASSERT(NS_IsMainThread());
AssertIsInMainProcess();
nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
if (obs) {
@ -107,7 +98,6 @@ BrowserElementAudioChannel::BrowserElementAudioChannel(
BrowserElementAudioChannel::~BrowserElementAudioChannel()
{
MOZ_ASSERT(NS_IsMainThread());
AssertIsInMainProcess();
nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
if (obs) {
@ -173,8 +163,6 @@ AudioChannel
BrowserElementAudioChannel::Name() const
{
MOZ_ASSERT(NS_IsMainThread());
AssertIsInMainProcess();
return mAudioChannel;
}
@ -361,7 +349,6 @@ already_AddRefed<dom::DOMRequest>
BrowserElementAudioChannel::GetVolume(ErrorResult& aRv)
{
MOZ_ASSERT(NS_IsMainThread());
AssertIsInMainProcess();
if (!mFrameWindow) {
nsCOMPtr<nsIDOMDOMRequest> request;
@ -387,7 +374,6 @@ already_AddRefed<dom::DOMRequest>
BrowserElementAudioChannel::SetVolume(float aVolume, ErrorResult& aRv)
{
MOZ_ASSERT(NS_IsMainThread());
AssertIsInMainProcess();
if (!mFrameWindow) {
nsCOMPtr<nsIDOMDOMRequest> request;
@ -420,7 +406,6 @@ already_AddRefed<dom::DOMRequest>
BrowserElementAudioChannel::GetMuted(ErrorResult& aRv)
{
MOZ_ASSERT(NS_IsMainThread());
AssertIsInMainProcess();
if (!mFrameWindow) {
nsCOMPtr<nsIDOMDOMRequest> request;
@ -446,7 +431,6 @@ already_AddRefed<dom::DOMRequest>
BrowserElementAudioChannel::SetMuted(bool aMuted, ErrorResult& aRv)
{
MOZ_ASSERT(NS_IsMainThread());
AssertIsInMainProcess();
if (!mFrameWindow) {
nsCOMPtr<nsIDOMDOMRequest> request;
@ -479,7 +463,6 @@ already_AddRefed<dom::DOMRequest>
BrowserElementAudioChannel::IsActive(ErrorResult& aRv)
{
MOZ_ASSERT(NS_IsMainThread());
AssertIsInMainProcess();
if (mState != eStateUnknown) {
RefPtr<DOMRequest> domRequest = new DOMRequest(GetOwner());
@ -593,8 +576,29 @@ BrowserElementAudioChannel::Observe(nsISupports* aSubject, const char* aTopic,
}
nsCOMPtr<nsISupportsPRUint64> wrapper = do_QueryInterface(aSubject);
if (NS_WARN_IF(!wrapper)) {
return NS_ERROR_FAILURE;
// This can be a nested iframe.
if (!wrapper) {
nsCOMPtr<nsITabParent> iTabParent = do_QueryInterface(aSubject);
if (!iTabParent) {
return NS_ERROR_FAILURE;
}
RefPtr<TabParent> tabParent = TabParent::GetFrom(iTabParent);
if (!tabParent) {
return NS_ERROR_FAILURE;
}
Element* element = tabParent->GetOwnerElement();
if (!element) {
return NS_ERROR_FAILURE;
}
nsCOMPtr<nsPIDOMWindow> window = element->OwnerDoc()->GetWindow();
if (window == mFrameWindow) {
ProcessStateChanged(aData);
}
return NS_OK;
}
uint64_t windowID;

Просмотреть файл

@ -139,7 +139,6 @@ function BrowserElementChild() {
this._isContentWindowCreated = false;
this._pendingSetInputMethodActive = [];
this._selectionStateChangedTarget = null;
this.forwarder = new BrowserElementProxyForwarder();
@ -224,11 +223,6 @@ BrowserElementChild.prototype = {
/* useCapture = */ true,
/* wantsUntrusted = */ false);
addEventListener('mozselectionstatechanged',
this._selectionStateChangedHandler.bind(this),
/* useCapture = */ true,
/* wantsUntrusted = */ false);
addEventListener('scrollviewchange',
this._ScrollViewChangeHandler.bind(this),
/* useCapture = */ true,
@ -719,97 +713,6 @@ BrowserElementChild.prototype = {
}
},
_selectionStateChangedHandler: function(e) {
e.stopPropagation();
if (!this._isContentWindowCreated) {
return;
}
let boundingClientRect = e.boundingClientRect;
let isCollapsed = (e.selectedText.length == 0);
let isMouseUp = (e.states.indexOf('mouseup') == 0);
let canPaste = this._isCommandEnabled("paste");
if (this._selectionStateChangedTarget != e.target) {
// SelectionStateChanged events with the following states are not
// necessary to trigger the text dialog, bypass these events
// by default.
//
if(e.states.length == 0 ||
e.states.indexOf('drag') == 0 ||
e.states.indexOf('keypress') == 0 ||
e.states.indexOf('mousedown') == 0) {
return;
}
// The collapsed SelectionStateChanged event is unnecessary to dispatch,
// bypass this event by default, but here comes some exceptional cases
if (isCollapsed) {
if (isMouseUp && canPaste) {
// Always dispatch to support shortcut mode which can paste previous
// copied content easily
} else if (e.states.indexOf('blur') == 0) {
// Always dispatch to notify the blur for the focus content
} else if (e.states.indexOf('taponcaret') == 0) {
// Always dispatch to notify the caret be touched
} else {
return;
}
}
}
// If we select something and selection range is visible, we cache current
// event's target to selectionStateChangedTarget.
// And dispatch the next SelectionStateChagne event if target is matched, so
// that the parent side can hide the text dialog.
// We clear selectionStateChangedTarget if selection carets are invisible.
if (e.visible && !isCollapsed) {
this._selectionStateChangedTarget = e.target;
} else if (canPaste && isCollapsed) {
this._selectionStateChangedTarget = e.target;
} else {
this._selectionStateChangedTarget = null;
}
let zoomFactor = content.screen.width / content.innerWidth;
let detail = {
rect: {
width: boundingClientRect ? boundingClientRect.width : 0,
height: boundingClientRect ? boundingClientRect.height : 0,
top: boundingClientRect ? boundingClientRect.top : 0,
bottom: boundingClientRect ? boundingClientRect.bottom : 0,
left: boundingClientRect ? boundingClientRect.left : 0,
right: boundingClientRect ? boundingClientRect.right : 0,
},
commands: {
canSelectAll: this._isCommandEnabled("selectall"),
canCut: this._isCommandEnabled("cut"),
canCopy: this._isCommandEnabled("copy"),
canPaste: this._isCommandEnabled("paste"),
},
zoomFactor: zoomFactor,
states: e.states,
isCollapsed: (e.selectedText.length == 0),
visible: e.visible,
};
// Get correct geometry information if we have nested iframe.
let currentWindow = e.target.defaultView;
while (currentWindow.realFrameElement) {
let currentRect = currentWindow.realFrameElement.getBoundingClientRect();
detail.rect.top += currentRect.top;
detail.rect.bottom += currentRect.top;
detail.rect.left += currentRect.left;
detail.rect.right += currentRect.left;
currentWindow = currentWindow.realFrameElement.ownerDocument.defaultView;
}
sendAsyncMsg('selectionstatechanged', detail);
},
_genericMetaHandler: function(name, eventType, target) {
let meta = {
name: name,
@ -1481,7 +1384,6 @@ BrowserElementChild.prototype = {
_recvDoCommand: function(data) {
if (this._isCommandEnabled(data.json.command)) {
this._selectionStateChangedTarget = null;
docShell.doCommand(COMMAND_MAP[data.json.command]);
}
},

Просмотреть файл

@ -81,7 +81,7 @@ BrowserElementParentProxyCallHandler.prototype = {
"contextmenu", "securitychange", "locationchange",
"iconchange", "scrollareachanged", "titlechange",
"opensearch", "manifestchange", "metachange",
"resize", "selectionstatechanged", "scrollviewchange",
"resize", "scrollviewchange",
"caretstatechanged", "activitydone", "scroll", "opentab"]),
init: function(frameElement, mm) {
@ -380,7 +380,6 @@ BrowserElementParent.prototype = {
"got-visible": this._gotDOMRequestResult,
"visibilitychange": this._childVisibilityChange,
"got-set-input-method-active": this._gotDOMRequestResult,
"selectionstatechanged": this._handleSelectionStateChanged,
"scrollviewchange": this._handleScrollViewChange,
"caretstatechanged": this._handleCaretStateChanged,
"findchange": this._handleFindChange,
@ -619,12 +618,6 @@ BrowserElementParent.prototype = {
}
},
_handleSelectionStateChanged: function(data) {
let evt = this._createEvent('selectionstatechanged', data.json,
/* cancelable = */ false);
this._frameElement.dispatchEvent(evt);
},
// Called when state of accessible caret in child has changed.
// The fields of data is as following:
// - rect: Contains bounding rectangle of selection, Include width, height,

Просмотреть файл

@ -65,10 +65,6 @@ const browserElementTestHelpers = {
this._setPref('dom.mozBrowserFramesEnabled', value);
},
setSelectionChangeEnabledPref: function(value) {
this._setPref('selectioncaret.enabled', value);
},
setAccessibleCaretEnabledPref: function(value) {
this._setPref('layout.accessiblecaret.enabled', value);
},

Просмотреть файл

@ -192,7 +192,10 @@ function runTests() {
}
addEventListener('load', function() {
SimpleTest.executeSoon(runTests);
addEventListener('testready', function() {
SpecialPowers.pushPrefEnv({'set': [["b2g.system_manifest_url", "http://mochi.test:8888/manifest.webapp"]]},
function() {
SimpleTest.executeSoon(runTests);
});
});

Просмотреть файл

@ -0,0 +1,79 @@
/* Any copyright is dedicated to the public domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
// Bug 1113086 - tests for AudioChannel API into BrowserElement
"use strict";
SimpleTest.waitForExplicitFinish();
browserElementTestHelpers.setEnabledPref(true);
browserElementTestHelpers.addPermission();
function runTests() {
var iframe = document.createElement('iframe');
iframe.setAttribute('mozbrowser', 'true');
iframe.setAttribute('mozapp', 'http://example.org/manifest.webapp');
var listener = function(e) {
var message = e.detail.message;
if (/^OK/.exec(message)) {
ok(true, "Message from app: " + message);
} else if (/^KO/.exec(message)) {
ok(false, "Message from app: " + message);
} else if (/DONE/.exec(message)) {
ok(true, "Messaging from app complete");
iframe.removeEventListener('mozbrowsershowmodalprompt', listener);
}
}
function audio_loadend() {
ok("mute" in iframe, "iframe.mute exists");
ok("unmute" in iframe, "iframe.unmute exists");
ok("getMuted" in iframe, "iframe.getMuted exists");
ok("getVolume" in iframe, "iframe.getVolume exists");
ok("setVolume" in iframe, "iframe.setVolume exists");
ok("allowedAudioChannels" in iframe, "allowedAudioChannels exist");
var channels = iframe.allowedAudioChannels;
is(channels.length, 1, "1 audio channel by default");
var ac = channels[0];
ok(ac instanceof BrowserElementAudioChannel, "Correct class");
ok("getVolume" in ac, "ac.getVolume exists");
ok("setVolume" in ac, "ac.setVolume exists");
ok("getMuted" in ac, "ac.getMuted exists");
ok("setMuted" in ac, "ac.setMuted exists");
ok("isActive" in ac, "ac.isActive exists");
info("Setting the volume...");
ac.setVolume(0.5);
ac.onactivestatechanged = function() {
ok(true, "activestatechanged event received.");
ac.onactivestatechanged = null;
SimpleTest.finish();
}
}
iframe.addEventListener('mozbrowserloadend', audio_loadend);
iframe.addEventListener('mozbrowsershowmodalprompt', listener, false);
document.body.appendChild(iframe);
var context = { 'url': 'http://example.org',
'appId': SpecialPowers.Ci.nsIScriptSecurityManager.NO_APP_ID,
'isInBrowserElement': true };
SpecialPowers.pushPermissions([
{'type': 'browser', 'allow': 1, 'context': context},
{'type': 'embed-apps', 'allow': 1, 'context': context}
], function() {
iframe.src = 'http://example.org/tests/dom/browser-element/mochitest/file_browserElement_AudioChannel_nested.html';
});
}
addEventListener('testready', function() {
SpecialPowers.pushPrefEnv({'set': [["b2g.system_manifest_url", "http://mochi.test:8888/manifest.webapp"]]},
function() {
SimpleTest.executeSoon(runTests);
});
});

Просмотреть файл

@ -68,4 +68,9 @@ function runTest() {
iframe.src = browserElementTestHelpers.emptyPage1;
}
addEventListener('testready', runTest);
addEventListener('testready', function() {
SpecialPowers.pushPrefEnv({'set': [["b2g.system_manifest_url", "http://mochi.test:8888/manifest.webapp"]]},
function() {
SimpleTest.executeSoon(runTest);
});
});

Просмотреть файл

@ -1,14 +1,12 @@
/* Any copyright is dedicated to the public domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
// Test that "cut, copy, paste, selectall" and selectionstatechanged event works from inside an <iframe mozbrowser>.
// Test that "cut, copy, paste, selectall" and caretstatechanged event works from inside an <iframe mozbrowser>.
"use strict";
SimpleTest.waitForExplicitFinish();
SimpleTest.requestFlakyTimeout("untriaged");
SimpleTest.requestLongerTimeout(2); // slow on android
browserElementTestHelpers.setEnabledPref(true);
browserElementTestHelpers.setSelectionChangeEnabledPref(false);
browserElementTestHelpers.setAccessibleCaretEnabledPref(true);
browserElementTestHelpers.addPermission();
const { Services } = SpecialPowers.Cu.import('resource://gre/modules/Services.jsm');
@ -92,14 +90,6 @@ function doCommand(cmd) {
'copypaste-docommand', cmd);
}
function rerunTest() {
// clean up and run test again.
document.body.removeChild(iframeOuter);
document.body.removeChild(gTextarea);
state = 0;
runTest();
}
function dispatchTest(e) {
iframeInner.addEventListener("mozbrowserloadend", function onloadend2(e) {
iframeInner.removeEventListener("mozbrowserloadend", onloadend2);
@ -171,23 +161,15 @@ function dispatchTest(e) {
break;
default:
if (createEmbededFrame || browserElementTestHelpers.getOOPByDefaultPref()) {
if (testSelectionChange) {
SimpleTest.finish();
return;
} else {
testSelectionChange = true;
createEmbededFrame = false;
SpecialPowers.pushPrefEnv(
{'set':
[['selectioncaret.enabled', true],
['layout.accessiblecaret.enabled', false]]},
function() {
rerunTest();
});
}
SimpleTest.finish();
} else {
createEmbededFrame = true;
rerunTest();
// clean up and run test again.
document.body.removeChild(iframeOuter);
document.body.removeChild(gTextarea);
state = 0;
runTest();
}
break;
}
@ -202,25 +184,20 @@ function isChildProcess() {
function testSelectAll(e) {
// Skip mozbrowser test if we're at child process.
if (!isChildProcess()) {
let eventName = testSelectionChange ? "mozbrowserselectionstatechanged" : "mozbrowsercaretstatechanged";
iframeOuter.addEventListener(eventName, function selectchangeforselectall(e) {
if (!e.detail.states || e.detail.states.indexOf('selectall') == 0) {
iframeOuter.removeEventListener(eventName, selectchangeforselectall, true);
ok(true, "got mozbrowserselectionstatechanged event." + stateMeaning);
ok(e.detail, "event.detail is not null." + stateMeaning);
ok(e.detail.width != 0, "event.detail.width is not zero" + stateMeaning);
ok(e.detail.height != 0, "event.detail.height is not zero" + stateMeaning);
if (testSelectionChange) {
ok(e.detail.states, "event.detail.state " + e.detail.states);
}
SimpleTest.executeSoon(function() { testCopy1(e); });
}
let eventName = "mozbrowsercaretstatechanged";
iframeOuter.addEventListener(eventName, function caretchangeforselectall(e) {
iframeOuter.removeEventListener(eventName, caretchangeforselectall, true);
ok(true, "got mozbrowsercaretstatechanged event." + stateMeaning);
ok(e.detail, "event.detail is not null." + stateMeaning);
ok(e.detail.width != 0, "event.detail.width is not zero" + stateMeaning);
ok(e.detail.height != 0, "event.detail.height is not zero" + stateMeaning);
SimpleTest.executeSoon(function() { testCopy1(e); });
}, true);
}
mm.addMessageListener('content-focus', function messageforfocus(msg) {
mm.removeMessageListener('content-focus', messageforfocus);
// test selectall command, after calling this the selectionstatechanged event should be fired.
// test selectall command, after calling this the caretstatechanged event should be fired.
doCommand('selectall');
if (isChildProcess()) {
SimpleTest.executeSoon(function() { testCopy1(e); });

Просмотреть файл

@ -1,57 +0,0 @@
/* Any copyright is dedicated to the public domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
// Bug 1111433: Send out the SelectionStateChanged event with Blur state
"use strict";
SimpleTest.waitForExplicitFinish();
browserElementTestHelpers.setEnabledPref(true);
browserElementTestHelpers.setSelectionChangeEnabledPref(true);
browserElementTestHelpers.addPermission();
var mm;
var iframe;
var changefocus = function () {
var elt = content.document.getElementById("text");
if (elt) {
elt.focus();
elt.select();
elt.blur();
}
}
function runTest() {
iframe = document.createElement('iframe');
iframe.setAttribute('mozbrowser', 'true');
document.body.appendChild(iframe);
mm = SpecialPowers.getBrowserFrameMessageManager(iframe);
iframe.src = "data:text/html,<html><body>" +
"<textarea id='text'> Bug 1111433 </textarea>"+
"</body></html>";
var loadtime = 0;
iframe.addEventListener("mozbrowserloadend", function onloadend(e) {
loadtime++;
if (loadtime === 2) {
iframe.removeEventListener("mozbrowserloadend", onloadend);
SimpleTest.executeSoon(function() { testBlur(e); });
}
});
}
function testBlur(e) {
iframe.addEventListener("mozbrowserselectionstatechanged", function selectionstatechanged(e) {
iframe.removeEventListener("mozbrowserselectionstatechanged", selectionstatechanged, true);
ok(e.detail.states.indexOf('blur') == 0, "received state " + e.detail.states);
SimpleTest.finish();
}, true);
iframe.focus();
mm.loadFrameScript('data:,(' + changefocus.toString() + ')();', false);
}
addEventListener('testready', runTest);

Просмотреть файл

@ -0,0 +1,63 @@
<html>
<head>
<script type="text/javascript">
function ok(a, msg) {
alert((!!a ? "OK" : "KO") + " " + msg);
}
function is(a, b, msg) {
ok(a === b, msg);
}
function finish(a, b, msg) {
alert("DONE");
}
addEventListener('load', function(e) {
var iframe = document.createElement('iframe');
iframe.setAttribute('mozbrowser', 'true');
// set 'remote' to true here will make the the iframe remote in _inproc_
// test and in-process in _oop_ test.
iframe.setAttribute('remote', 'true');
iframe.setAttribute('mozapp', 'http://example.org/manifest.webapp');
iframe.addEventListener('mozbrowserloadend', function(e) {
ok("mute" in iframe, "iframe.mute exists");
ok("unmute" in iframe, "iframe.unmute exists");
ok("getMuted" in iframe, "iframe.getMuted exists");
ok("getVolume" in iframe, "iframe.getVolume exists");
ok("setVolume" in iframe, "iframe.setVolume exists");
ok("allowedAudioChannels" in iframe, "allowedAudioChannels exist");
var channels = iframe.allowedAudioChannels;
is(channels.length, 1, "1 audio channel by default");
var ac = channels[0];
ok(ac instanceof BrowserElementAudioChannel, "Correct class");
ok("getVolume" in ac, "ac.getVolume exists");
ok("setVolume" in ac, "ac.setVolume exists");
ok("getMuted" in ac, "ac.getMuted exists");
ok("setMuted" in ac, "ac.setMuted exists");
ok("isActive" in ac, "ac.isActive exists");
ac.onactivestatechanged = function() {
ok("activestatechanged event received.");
ac.getVolume().onsuccess = function(e) {
ok(e.target.result, 1, "Default volume is 1");
};
finish();
}
});
document.body.appendChild(iframe);
iframe.src = 'http://example.org/tests/dom/browser-element/mochitest/file_audio.html';
});
</script>
</head>
<body>
</body>
</html>

Просмотреть файл

@ -87,9 +87,6 @@ skip-if = (toolkit == 'gonk')
[test_browserElement_oop_ScrollEvent.html]
[test_browserElement_oop_SecurityChange.html]
skip-if = toolkit == 'android' || (toolkit == 'gonk' && !debug) #TIMED_OUT, bug 766586
[test_browserElement_oop_SelectionStateBlur.html]
skip-if = (os == "android" || toolkit == 'gonk') # Disabled on b2g due to bug 1097419
# Disabled on Android, see bug 1230230
[test_browserElement_oop_SendEvent.html]
[test_browserElement_oop_SetInputMethodActive.html]
skip-if = (os == "android")
@ -121,6 +118,7 @@ disabled = bug 924771
disabled = bug 924771
[test_browserElement_oop_GetContentDimensions.html]
[test_browserElement_oop_AudioChannel.html]
[test_browserElement_oop_AudioChannel_nested.html]
[test_browserElement_oop_SetNFCFocus.html]
[test_browserElement_oop_getWebManifest.html]
[test_browserElement_oop_OpenWindowEmpty.html]

Просмотреть файл

@ -64,7 +64,6 @@ support-files =
browserElement_ScrollEvent.js
browserElement_SecurityChange.js
browserElement_SendEvent.js
browserElement_SelectionStateBlur.js
browserElement_SetInputMethodActive.js
browserElement_SetNFCFocus.js
browserElement_SetVisible.js
@ -83,11 +82,13 @@ support-files =
browserElement_XFrameOptionsSameOrigin.js
browserElement_GetContentDimensions.js
browserElement_AudioChannel.js
browserElement_AudioChannel_nested.js
file_browserElement_AlertInFrame.html
file_browserElement_AlertInFrame_Inner.html
file_browserElement_AllowEmbedAppsInNestedOOIframe.html
file_browserElement_AppFramePermission.html
file_browserElement_AppWindowNamespace.html
file_browserElement_AudioChannel_nested.html
file_browserElement_Viewmode.html
file_browserElement_ThemeColor.html
file_browserElement_BrowserWindowNamespace.html
@ -222,9 +223,6 @@ skip-if = (toolkit == 'gonk')
[test_browserElement_inproc_ScrollEvent.html]
[test_browserElement_inproc_SecurityChange.html]
skip-if = toolkit == 'android' || (toolkit == 'gonk' && !debug) # android(TIMED_OUT, bug 766586) androidx86(TIMED_OUT, bug 766586)
[test_browserElement_inproc_SelectionStateBlur.html]
skip-if = (os == "android" || toolkit == 'gonk') # Disabled on b2g due to bug 1097419
# Disabled on Android, see bug 1230230
[test_browserElement_inproc_SendEvent.html]
# The setInputMethodActive() tests will timed out on Android
[test_browserElement_inproc_SetInputMethodActive.html]
@ -250,6 +248,7 @@ skip-if = (toolkit == 'android' && processor == 'x86') #x86 only bug 936226
disabled = bug 774100
[test_browserElement_inproc_GetContentDimensions.html]
[test_browserElement_inproc_AudioChannel.html]
[test_browserElement_inproc_AudioChannel_nested.html]
[test_browserElement_inproc_SetNFCFocus.html]
[test_browserElement_inproc_getStructuredData.html]
[test_browserElement_inproc_OpenWindowEmpty.html]

Просмотреть файл

@ -1,18 +1,13 @@
<!DOCTYPE HTML>
<html>
<!--
https://bugzilla.mozilla.org/show_bug.cgi?id=1111433
-->
<head>
<title>Test for Bug 1111433</title>
<title>Test of browser element audioChannel in nested mozbrowser iframes.</title>
<script type="application/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<script type="application/javascript" src="browserElementTestHelpers.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
</head>
<body>
<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=1111433">Mozilla Bug 1111433</a>
<script type="application/javascript;version=1.7" src="browserElement_SelectionStateBlur.js">
<script type="application/javascript;version=1.7" src="browserElement_AudioChannel_nested.js">
</script>
</body>
</html>

Просмотреть файл

@ -1,18 +1,13 @@
<!DOCTYPE HTML>
<html>
<!--
https://bugzilla.mozilla.org/show_bug.cgi?id=1111433
-->
<head>
<title>Test for Bug 1111433</title>
<title>Test of browser element audioChannel.</title>
<script type="application/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<script type="application/javascript" src="browserElementTestHelpers.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
</head>
<body>
<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=1111433">Mozilla Bug 1111433</a>
<script type="application/javascript;version=1.7" src="browserElement_SelectionStateBlur.js">
<script type="application/javascript;version=1.7" src="browserElement_AudioChannel_nested.js">
</script>
</body>
</html>

Просмотреть файл

@ -1,106 +0,0 @@
<!DOCTYPE HTML>
<html>
<!--
https://bugzilla.mozilla.org/show_bug.cgi?id=648573
-->
<head>
<title>Test for Bug 648573</title>
<script type="application/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
</head>
<body>
<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=648573">Mozilla Bug 648573</a>
<p id="display"></p>
<div id="content" style="display: none">
</div>
<pre id="test">
<script type="application/javascript">
/** Test for Bug 648573 **/
SimpleTest.waitForExplicitFinish();
var utils = SpecialPowers.getDOMWindowUtils(window);
ok("createTouch" in document, "Should have createTouch function");
ok("createTouchList" in document, "Should have createTouchList function");
ok(document.createEvent("touchevent"), "Should be able to create TouchEvent objects");
var t1 = document.createTouch(window, document, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11);
is(t1.target, document, "Wrong target");
is(t1.identifier, 1, "Wrong identifier");
is(t1.pageX, 2, "Wrong pageX");
is(t1.pageY, 3, "Wrong pageY");
is(t1.screenX, 4, "Wrong screenX");
is(t1.screenY, 5, "Wrong screenY");
is(t1.clientX, 6, "Wrong clientX");
is(t1.clientY, 7, "Wrong clientY");
is(t1.radiusX, 8, "Wrong radiusX");
is(t1.radiusY, 9, "Wrong radiusY");
is(t1.rotationAngle, 10, "Wrong rotationAngle");
is(t1.force, 11, "Wrong force");
var t2 = document.createTouch();
var l1 = document.createTouchList(t1);
is(l1.length, 1, "Wrong length");
is(l1.item(0), t1, "Wront item (1)");
is(l1[0], t1, "Wront item (2)");
var l2 = document.createTouchList([t1, t2]);
is(l2.length, 2, "Wrong length");
is(l2.item(0), t1, "Wront item (3)");
is(l2.item(1), t2, "Wront item (4)");
is(l2[0], t1, "Wront item (5)");
is(l2[1], t2, "Wront item (6)");
var l3 = document.createTouchList();
var e = document.createEvent("touchevent");
e.initTouchEvent("touchmove", true, true, window, 0, true, true, true, true,
l1, l2, l3);
is(e.touches, l1, "Wrong list (1)");
is(e.targetTouches, l2, "Wrong list (2)");
is(e.changedTouches, l3, "Wrong list (3)");
ok(e.altKey, "Alt should be true");
ok(e.metaKey, "Meta should be true");
ok(e.ctrlKey, "Ctrl should be true");
ok(e.shiftKey, "Shift should be true");
var events =
["touchstart",
"touchend",
"touchmove",
"touchcancel"];
function runEventTest(type) {
var e = document.createEvent("touchevent");
e.initTouchEvent(type, true, true, window, 0, true, true, true, true,
l1, l2, l3);
var t = document.createElement("div");
// Testing target.onFoo;
var didCall = false;
t["on" + type] = function (evt) {
is(evt, e, "Wrong event");
evt.target.didCall = true;
}
t.dispatchEvent(e);
ok(t.didCall, "Should have called the listener(1)");
// Testing <element onFoo="">
t = document.createElement("div");
t.setAttribute("on" + type, "this.didCall = true;");
t.dispatchEvent(e);
ok(t.didCall, "Should have called the listener(2)");
}
for (var i = 0; i < events.length; ++i) {
runEventTest(events[i]);
}
SimpleTest.finish();
</script>
</pre>
</body>
</html>

Просмотреть файл

@ -5,7 +5,6 @@ support-files =
bug299673.js
bug322588-popup.html
bug426082.html
bug648573.html
bug656379-1.html
bug418986-3.js
error_event_worker.js

Просмотреть файл

@ -4,27 +4,106 @@
<!DOCTYPE html>
<html>
<!--
https://bugzilla.mozilla.org/show_bug.cgi?id=648573
-->
<head>
<title>Bug 648573 test</title>
<style>
iframe {
width: 600px;
height: 400px;
}
</style>
<title>Test for Bug 648573</title>
<script type="application/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
</head>
<body>
<div id="container"></div>
<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=648573">Mozilla Bug 648573</a>
<p id="display"></p>
<div id="content" style="display: none">
</div>
<pre id="test">
<script type="application/javascript">
/** Test for Bug 648573 **/
SimpleTest.waitForExplicitFinish();
var utils = SpecialPowers.getDOMWindowUtils(window);
ok("createTouch" in document, "Should have createTouch function");
ok("createTouchList" in document, "Should have createTouchList function");
ok(document.createEvent("touchevent"), "Should be able to create TouchEvent objects");
var t1 = document.createTouch(window, document, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11);
is(t1.target, document, "Wrong target");
is(t1.identifier, 1, "Wrong identifier");
is(t1.pageX, 2, "Wrong pageX");
is(t1.pageY, 3, "Wrong pageY");
is(t1.screenX, 4, "Wrong screenX");
is(t1.screenY, 5, "Wrong screenY");
is(t1.clientX, 6, "Wrong clientX");
is(t1.clientY, 7, "Wrong clientY");
is(t1.radiusX, 8, "Wrong radiusX");
is(t1.radiusY, 9, "Wrong radiusY");
is(t1.rotationAngle, 10, "Wrong rotationAngle");
is(t1.force, 11, "Wrong force");
var t2 = document.createTouch();
var l1 = document.createTouchList(t1);
is(l1.length, 1, "Wrong length");
is(l1.item(0), t1, "Wront item (1)");
is(l1[0], t1, "Wront item (2)");
var l2 = document.createTouchList([t1, t2]);
is(l2.length, 2, "Wrong length");
is(l2.item(0), t1, "Wront item (3)");
is(l2.item(1), t2, "Wront item (4)");
is(l2[0], t1, "Wront item (5)");
is(l2[1], t2, "Wront item (6)");
var l3 = document.createTouchList();
var e = document.createEvent("touchevent");
e.initTouchEvent("touchmove", true, true, window, 0, true, true, true, true,
l1, l2, l3);
is(e.touches, l1, "Wrong list (1)");
is(e.targetTouches, l2, "Wrong list (2)");
is(e.changedTouches, l3, "Wrong list (3)");
ok(e.altKey, "Alt should be true");
ok(e.metaKey, "Meta should be true");
ok(e.ctrlKey, "Ctrl should be true");
ok(e.shiftKey, "Shift should be true");
var events =
["touchstart",
"touchend",
"touchmove",
"touchcancel"];
function runEventTest(type) {
var e = document.createEvent("touchevent");
e.initTouchEvent(type, true, true, window, 0, true, true, true, true,
l1, l2, l3);
var t = document.createElement("div");
// Testing target.onFoo;
var didCall = false;
t["on" + type] = function (evt) {
is(evt, e, "Wrong event");
evt.target.didCall = true;
}
t.dispatchEvent(e);
ok(t.didCall, "Should have called the listener(1)");
// Testing <element onFoo="">
t = document.createElement("div");
t.setAttribute("on" + type, "this.didCall = true;");
t.dispatchEvent(e);
ok(t.didCall, "Should have called the listener(2)");
}
for (var i = 0; i < events.length; ++i) {
runEventTest(events[i]);
}
SimpleTest.finish();
</script>
</pre>
</body>
<script>
// Touch/Selection caret's pref is checked only when PresShell is initialized.
// To turn off the pref, we test bug 648573 in an iframe.
SpecialPowers.pushPrefEnv({"set": [['touchcaret.enabled', false]]}, function() {
SpecialPowers.pushPrefEnv({"set": [['selectioncaret.enabled', false]]}, function() {
var iframe = document.createElement("iframe");
iframe.src = "bug648573.html";
document.getElementById('container').appendChild(iframe);
});
});
</script>
</html>

Просмотреть файл

@ -49,11 +49,10 @@ function isEnabledMiddleClickPaste()
}
}
function isEnabledTouchCaret()
function isEnabledAccessibleCaret()
{
try {
return SpecialPowers.getBoolPref("touchcaret.enabled") ||
SpecialPowers.getBoolPref("layout.accessiblecaret.enabled");
return SpecialPowers.getBoolPref("layout.accessiblecaret.enabled");
} catch (e) {
return false;
}
@ -69,7 +68,7 @@ function doTest(aButton)
// then, the click event isn't generated.
if (aButton != 2 &&
(aButton != 1 || !isEnabledMiddleClickPaste()) &&
(aButton != 0 || !isEnabledTouchCaret())) {
(aButton != 0 || !isEnabledAccessibleCaret())) {
gClickCount = 0;
// click on border of input
synthesizeMouse(input, 5, 5, { button: aButton });

Просмотреть файл

@ -563,12 +563,18 @@ nsBrowserElement::GetAllowedAudioChannels(
return;
}
nsCOMPtr<mozIApplication> parentApp;
aRv = GetParentApplication(getter_AddRefs(parentApp));
if (NS_WARN_IF(aRv.Failed())) {
return;
}
MOZ_LOG(AudioChannelService::GetAudioChannelLog(), LogLevel::Debug,
("nsBrowserElement, GetAllowedAudioChannels, this = %p\n", this));
GenerateAllowedAudioChannels(window, frameLoader, mBrowserElementAPI,
manifestURL, mBrowserElementAudioChannels,
aRv);
manifestURL, parentApp,
mBrowserElementAudioChannels, aRv);
if (NS_WARN_IF(aRv.Failed())) {
return;
}
@ -583,6 +589,7 @@ nsBrowserElement::GenerateAllowedAudioChannels(
nsIFrameLoader* aFrameLoader,
nsIBrowserElementAPI* aAPI,
const nsAString& aManifestURL,
mozIApplication* aParentApp,
nsTArray<RefPtr<BrowserElementAudioChannel>>& aAudioChannels,
ErrorResult& aRv)
{
@ -625,6 +632,19 @@ nsBrowserElement::GenerateAllowedAudioChannels(
permissionName.AssignASCII("audio-channel-");
permissionName.AppendASCII(audioChannelTable[i].tag);
// In case of nested iframes we want to check if the parent has the
// permission to use this AudioChannel.
if (aParentApp) {
aRv = aParentApp->HasPermission(permissionName.get(), &allowed);
if (NS_WARN_IF(aRv.Failed())) {
return;
}
if (!allowed) {
continue;
}
}
aRv = app->HasPermission(permissionName.get(), &allowed);
if (NS_WARN_IF(aRv.Failed())) {
return;

Просмотреть файл

@ -125,11 +125,14 @@ public:
nsIFrameLoader* aFrameLoader,
nsIBrowserElementAPI* aAPI,
const nsAString& aManifestURL,
mozIApplication* aParentApp,
nsTArray<RefPtr<dom::BrowserElementAudioChannel>>& aAudioChannels,
ErrorResult& aRv);
protected:
NS_IMETHOD_(already_AddRefed<nsFrameLoader>) GetFrameLoader() = 0;
NS_IMETHOD GetParentApplication(mozIApplication** aApplication) = 0;
void InitBrowserElementAPI();
nsCOMPtr<nsIBrowserElementAPI> mBrowserElementAPI;
nsTArray<RefPtr<dom::BrowserElementAudioChannel>> mBrowserElementAudioChannels;

Просмотреть файл

@ -191,6 +191,35 @@ nsGenericHTMLFrameElement::GetFrameLoader()
return loader.forget();
}
NS_IMETHODIMP
nsGenericHTMLFrameElement::GetParentApplication(mozIApplication** aApplication)
{
if (!aApplication) {
return NS_ERROR_FAILURE;
}
*aApplication = nullptr;
uint32_t appId;
nsIPrincipal *principal = NodePrincipal();
nsresult rv = principal->GetAppId(&appId);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
nsCOMPtr<nsIAppsService> appsService = do_GetService(APPS_SERVICE_CONTRACTID);
if (NS_WARN_IF(!appsService)) {
return NS_ERROR_FAILURE;
}
rv = appsService->GetAppByLocalId(appId, aApplication);
if (NS_WARN_IF(NS_FAILED(rv))) {
return rv;
}
return NS_OK;
}
NS_IMETHODIMP
nsGenericHTMLFrameElement::SwapFrameLoaders(nsIFrameLoaderOwner* aOtherOwner)
{

Просмотреть файл

@ -5,7 +5,7 @@
#include "domstubs.idl"
[scriptable, uuid(7615408c-1fb3-4128-8dd5-a3e2f3fa8842)]
[builtinclass, scriptable, uuid(8e49f7b0-1f98-4939-bf91-e9c39cd56434)]
interface nsITabParent : nsISupports
{
void injectTouchEvent(in AString aType,

Просмотреть файл

@ -98,7 +98,7 @@ ContentBridgeChild::SendPBrowserConstructor(PBrowserChild* aActor,
jsipc::CPOWManager*
ContentBridgeChild::GetCPOWManager()
{
if (PJavaScriptChild* c = LoneManagedOrNull(ManagedPJavaScriptChild())) {
if (PJavaScriptChild* c = LoneManagedOrNullAsserts(ManagedPJavaScriptChild())) {
return CPOWManagerFor(c);
}
return CPOWManagerFor(SendPJavaScriptConstructor());

Просмотреть файл

@ -180,7 +180,7 @@ ContentBridgeParent::NotifyTabDestroyed()
jsipc::CPOWManager*
ContentBridgeParent::GetCPOWManager()
{
if (PJavaScriptParent* p = LoneManagedOrNull(ManagedPJavaScriptParent())) {
if (PJavaScriptParent* p = LoneManagedOrNullAsserts(ManagedPJavaScriptParent())) {
return CPOWManagerFor(p);
}
return nullptr;

Просмотреть файл

@ -1723,7 +1723,7 @@ ContentChild::DeallocPTestShellChild(PTestShellChild* shell)
jsipc::CPOWManager*
ContentChild::GetCPOWManager()
{
if (PJavaScriptChild* c = LoneManagedOrNull(ManagedPJavaScriptChild())) {
if (PJavaScriptChild* c = LoneManagedOrNullAsserts(ManagedPJavaScriptChild())) {
return CPOWManagerFor(c);
}
return CPOWManagerFor(SendPJavaScriptConstructor());
@ -2226,7 +2226,7 @@ ContentChild::ProcessingError(Result aCode, const char* aReason)
}
#if defined(MOZ_CRASHREPORTER) && !defined(MOZ_B2G)
if (PCrashReporterChild* c = LoneManagedOrNull(ManagedPCrashReporterChild())) {
if (PCrashReporterChild* c = LoneManagedOrNullAsserts(ManagedPCrashReporterChild())) {
CrashReporterChild* crashReporter =
static_cast<CrashReporterChild*>(c);
nsDependentCString reason(aReason);

Просмотреть файл

@ -2169,7 +2169,7 @@ ContentParent::ActorDestroy(ActorDestroyReason why)
// There's a window in which child processes can crash
// after IPC is established, but before a crash reporter
// is created.
if (PCrashReporterParent* p = LoneManagedOrNull(ManagedPCrashReporterParent())) {
if (PCrashReporterParent* p = LoneManagedOrNullAsserts(ManagedPCrashReporterParent())) {
CrashReporterParent* crashReporter =
static_cast<CrashReporterParent*>(p);
@ -2350,7 +2350,7 @@ ContentParent::NotifyTabDestroyed(const TabId& aTabId,
jsipc::CPOWManager*
ContentParent::GetCPOWManager()
{
if (PJavaScriptParent* p = LoneManagedOrNull(ManagedPJavaScriptParent())) {
if (PJavaScriptParent* p = LoneManagedOrNullAsserts(ManagedPJavaScriptParent())) {
return CPOWManagerFor(p);
}
return nullptr;
@ -2371,7 +2371,7 @@ ContentParent::DestroyTestShell(TestShellParent* aTestShell)
TestShellParent*
ContentParent::GetTestShellSingleton()
{
PTestShellParent* p = LoneManagedOrNull(ManagedPTestShellParent());
PTestShellParent* p = LoneManagedOrNullAsserts(ManagedPTestShellParent());
return static_cast<TestShellParent*>(p);
}
@ -3656,7 +3656,7 @@ ContentParent::KillHard(const char* aReason)
// We're about to kill the child process associated with this content.
// Something has gone wrong to get us here, so we generate a minidump
// of the parent and child for submission to the crash server.
if (PCrashReporterParent* p = LoneManagedOrNull(ManagedPCrashReporterParent())) {
if (PCrashReporterParent* p = LoneManagedOrNullAsserts(ManagedPCrashReporterParent())) {
CrashReporterParent* crashReporter =
static_cast<CrashReporterParent*>(p);
// GeneratePairedMinidump creates two minidumps for us - the main

Просмотреть файл

@ -35,7 +35,7 @@ CrashReporterChild::GetCrashReporter()
if (!reporters) {
return nullptr;
}
return LoneManagedOrNull(*reporters);
return LoneManagedOrNullAsserts(*reporters);
}
} // namespace dom

Просмотреть файл

@ -760,6 +760,13 @@ child:
*/
HandleAccessKey(uint32_t[] charCodes, bool isTrusted, int32_t modifierMask);
/**
* Propagate a refresh to the child process
*/
AudioChannelChangeNotification(uint32_t aAudioChannel,
float aVolume,
bool aMuted);
/*
* FIXME: write protocol!

Просмотреть файл

@ -2259,6 +2259,27 @@ TabChild::RecvHandleAccessKey(nsTArray<uint32_t>&& aCharCodes,
return true;
}
bool
TabChild::RecvAudioChannelChangeNotification(const uint32_t& aAudioChannel,
const float& aVolume,
const bool& aMuted)
{
nsCOMPtr<nsPIDOMWindow> window = do_GetInterface(WebNavigation());
if (window) {
RefPtr<AudioChannelService> service = AudioChannelService::GetOrCreate();
MOZ_ASSERT(service);
service->SetAudioChannelVolume(window,
static_cast<AudioChannel>(aAudioChannel),
aVolume);
service->SetAudioChannelMuted(window,
static_cast<AudioChannel>(aAudioChannel),
aMuted);
}
return true;
}
bool
TabChild::RecvDestroy()
{

Просмотреть файл

@ -504,6 +504,10 @@ public:
const bool& aIsTrusted,
const int32_t& aModifierMask) override;
virtual bool RecvAudioChannelChangeNotification(const uint32_t& aAudioChannel,
const float& aVolume,
const bool& aMuted) override;
/**
* Native widget remoting protocol for use with windowed plugins with e10s.
*/

Просмотреть файл

@ -184,7 +184,7 @@ private:
// Our TabParent may have been destroyed already. If so, don't send any
// fds over, just go back to the IO thread and close them.
if (!tabParent->IsDestroyed()) {
mozilla::Unused << tabParent->SendCacheFileDescriptor(mPath, fd);
Unused << tabParent->SendCacheFileDescriptor(mPath, fd);
}
if (!mFD) {
@ -232,7 +232,7 @@ private:
// Intentionally leak the runnable (but not the fd) rather
// than crash when trying to release a main thread object
// off the main thread.
mozilla::Unused << mTabParent.forget();
Unused << mTabParent.forget();
CloseFile();
}
}
@ -387,6 +387,11 @@ TabParent::AddWindowListeners()
mPresShellWithRefreshListener = shell;
shell->AddPostRefreshObserver(this);
}
RefPtr<AudioChannelService> acs = AudioChannelService::GetOrCreate();
if (acs) {
acs->RegisterTabParent(this);
}
}
}
@ -405,6 +410,11 @@ TabParent::RemoveWindowListeners()
mPresShellWithRefreshListener->RemovePostRefreshObserver(this);
mPresShellWithRefreshListener = nullptr;
}
RefPtr<AudioChannelService> acs = AudioChannelService::GetOrCreate();
if (acs) {
acs->UnregisterTabParent(this);
}
}
void
@ -2338,7 +2348,7 @@ TabParent::GetTabIdFrom(nsIDocShell *docShell)
RenderFrameParent*
TabParent::GetRenderFrame()
{
PRenderFrameParent* p = LoneManagedOrNull(ManagedPRenderFrameParent());
PRenderFrameParent* p = LoneManagedOrNullAsserts(ManagedPRenderFrameParent());
return static_cast<RenderFrameParent*>(p);
}
@ -2658,7 +2668,6 @@ TabParent::RecvAudioChannelActivityNotification(const uint32_t& aAudioChannel,
nsCOMPtr<nsIObserverService> os = services::GetObserverService();
if (os) {
RefPtr<AudioChannelService> service = AudioChannelService::GetOrCreate();
nsAutoCString topic;
topic.Assign("audiochannel-activity-");
topic.Append(AudioChannelService::GetAudioChannelTable()[aAudioChannel].tag);
@ -3419,6 +3428,33 @@ TabParent::GetShowInfo()
mDPI, mDefaultScale.scale);
}
void
TabParent::AudioChannelChangeNotification(nsPIDOMWindow* aWindow,
AudioChannel aAudioChannel,
float aVolume,
bool aMuted)
{
if (!mFrameElement || !mFrameElement->OwnerDoc()) {
return;
}
nsCOMPtr<nsPIDOMWindow> window = mFrameElement->OwnerDoc()->GetWindow();
while (window) {
if (window == aWindow) {
Unused << SendAudioChannelChangeNotification(static_cast<uint32_t>(aAudioChannel),
aVolume, aMuted);
break;
}
nsCOMPtr<nsPIDOMWindow> win = window->GetScriptableParent();
if (window == win) {
break;
}
window = win;
}
}
NS_IMETHODIMP
FakeChannel::OnAuthAvailable(nsISupports *aContext, nsIAuthInformation *aAuthInfo)
{

Просмотреть файл

@ -9,6 +9,7 @@
#include "js/TypeDecls.h"
#include "mozilla/ContentCache.h"
#include "mozilla/dom/AudioChannelBinding.h"
#include "mozilla/dom/ipc/IdType.h"
#include "mozilla/dom/PBrowserParent.h"
#include "mozilla/dom/PContent.h"
@ -455,6 +456,11 @@ public:
void OnStartSignedPackageRequest(nsIChannel* aChannel,
const nsACString& aPackageId);
void AudioChannelChangeNotification(nsPIDOMWindow* aWindow,
AudioChannel aAudioChannel,
float aVolume,
bool aMuted);
protected:
bool ReceiveMessage(const nsString& aMessage,
bool aSync,

Просмотреть файл

@ -172,3 +172,6 @@ PEExpectedVariableName=Expected identifier for variable name but found '%1$S'.
PEExpectedVariableFallback=Expected variable reference fallback after ','.
PEExpectedVariableCommaOrCloseParen=Expected ',' or ')' after variable name in variable reference but found '%1$S'.
PESubgridNotSupported=Support for the 'subgrid' keyword of CSS Grid is not enabled.
PEMoreThanOneGridRepeatAutoFillInNameList=Only one repeat(auto-fill, ...) is allowed in a name list for a subgrid.
PEMoreThanOneGridRepeatAutoFillFitInTrackList=Only one repeat(auto-fill, ...) or repeat(auto-fit, ...) is allowed in a track list.
PEMoreThanOneGridRepeatTrackSize=Only one track size is allowed inside repeat(auto-fit/auto-fill, ...).

Просмотреть файл

@ -17,17 +17,12 @@
#include "WaveReader.h"
#include "WebMDecoder.h"
#include "WebMReader.h"
#include "WebMDemuxer.h"
#ifdef MOZ_RAW
#include "RawDecoder.h"
#include "RawReader.h"
#endif
#ifdef MOZ_GSTREAMER
#include "GStreamerDecoder.h"
#include "GStreamerReader.h"
#endif
#ifdef MOZ_ANDROID_OMX
#include "AndroidMediaDecoder.h"
#include "AndroidMediaReader.h"
@ -172,23 +167,6 @@ DecoderTraits::IsWebMAudioType(const nsACString& aType)
return aType.EqualsASCII("audio/webm");
}
#ifdef MOZ_GSTREAMER
static bool
IsGStreamerSupportedType(const nsACString& aMimeType)
{
if (DecoderTraits::IsWebMTypeAndEnabled(aMimeType))
return false;
if (!MediaDecoder::IsGStreamerEnabled())
return false;
if (IsOggType(aMimeType) && !Preferences::GetBool("media.prefer-gstreamer", false))
return false;
return GStreamerDecoder::CanHandleMediaType(aMimeType, nullptr);
}
#endif
#ifdef MOZ_OMX_DECODER
static const char* const gOmxTypes[] = {
"audio/mpeg",
@ -498,12 +476,6 @@ DecoderTraits::CanHandleMediaType(const char* aMIMEType,
if (IsAACSupportedType(nsDependentCString(aMIMEType))) {
return CANPLAY_MAYBE;
}
#ifdef MOZ_GSTREAMER
if (GStreamerDecoder::CanHandleMediaType(nsDependentCString(aMIMEType),
aHaveRequestedCodecs ? &aRequestedCodecs : nullptr)) {
return aHaveRequestedCodecs ? CANPLAY_YES : CANPLAY_MAYBE;
}
#endif
#ifdef MOZ_OMX_DECODER
if (IsOmxSupportedType(nsDependentCString(aMIMEType))) {
return CANPLAY_MAYBE;
@ -550,12 +522,6 @@ InstantiateDecoder(const nsACString& aType, MediaDecoderOwner* aOwner)
decoder = new ADTSDecoder(aOwner);
return decoder.forget();
}
#ifdef MOZ_GSTREAMER
if (IsGStreamerSupportedType(aType)) {
decoder = new GStreamerDecoder(aOwner);
return decoder.forget();
}
#endif
#ifdef MOZ_RAW
if (IsRawType(aType)) {
decoder = new RawDecoder(aOwner);
@ -650,11 +616,6 @@ MediaDecoderReader* DecoderTraits::CreateReader(const nsACString& aType, Abstrac
if (IsAACSupportedType(aType)) {
decoderReader = new MediaFormatReader(aDecoder, new ADTSDemuxer(aDecoder->GetResource()));
} else
#ifdef MOZ_GSTREAMER
if (IsGStreamerSupportedType(aType)) {
decoderReader = new GStreamerReader(aDecoder);
} else
#endif
#ifdef MOZ_RAW
if (IsRawType(aType)) {
decoderReader = new RawReader(aDecoder);
@ -677,13 +638,10 @@ MediaDecoderReader* DecoderTraits::CreateReader(const nsACString& aType, Abstrac
decoderReader = new AndroidMediaReader(aDecoder, aType);
} else
#endif
if (IsWebMSupportedType(aType)) {
decoderReader = Preferences::GetBool("media.format-reader.webm", true) ?
static_cast<MediaDecoderReader*>(new MediaFormatReader(aDecoder, new WebMDemuxer(aDecoder->GetResource()))) :
new WebMReader(aDecoder);
decoderReader =
new MediaFormatReader(aDecoder, new WebMDemuxer(aDecoder->GetResource()));
} else
#ifdef MOZ_DIRECTSHOW
if (IsDirectShowSupportedType(aType)) {
decoderReader = new DirectShowReader(aDecoder);
@ -715,9 +673,6 @@ bool DecoderTraits::IsSupportedInVideoDocument(const nsACString& aType)
!IsB2GSupportOnlyType(aType)) ||
#endif
IsWebMSupportedType(aType) ||
#ifdef MOZ_GSTREAMER
IsGStreamerSupportedType(aType) ||
#endif
#ifdef MOZ_ANDROID_OMX
(MediaDecoder::IsAndroidMediaEnabled() && IsAndroidMediaType(aType)) ||
#endif

Просмотреть файл

@ -1662,14 +1662,6 @@ MediaDecoder::IsRtspEnabled()
}
#endif
#ifdef MOZ_GSTREAMER
bool
MediaDecoder::IsGStreamerEnabled()
{
return Preferences::GetBool("media.gstreamer.enabled");
}
#endif
#ifdef MOZ_OMX_DECODER
bool
MediaDecoder::IsOmxEnabled()

Просмотреть файл

@ -669,10 +669,6 @@ private:
static bool IsRtspEnabled();
#endif
#ifdef MOZ_GSTREAMER
static bool IsGStreamerEnabled();
#endif
#ifdef MOZ_OMX_DECODER
static bool IsOmxEnabled();
#endif

Просмотреть файл

@ -582,7 +582,7 @@ void
GMPParent::GetCrashID(nsString& aResult)
{
CrashReporterParent* cr =
static_cast<CrashReporterParent*>(LoneManagedOrNull(ManagedPCrashReporterParent()));
static_cast<CrashReporterParent*>(LoneManagedOrNullAsserts(ManagedPCrashReporterParent()));
if (NS_WARN_IF(!cr)) {
return;
}

Просмотреть файл

@ -1,201 +0,0 @@
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "GStreamerAllocator.h"
#include <gst/video/video.h>
#include <gst/video/gstvideometa.h>
#include "GStreamerLoader.h"
using namespace mozilla::layers;
namespace mozilla {
typedef struct
{
GstAllocator parent;
GStreamerReader *reader;
} MozGfxMemoryAllocator;
typedef struct
{
GstAllocatorClass parent;
} MozGfxMemoryAllocatorClass;
typedef struct
{
GstMemory memory;
PlanarYCbCrImage* image;
guint8* data;
} MozGfxMemory;
typedef struct
{
GstMeta meta;
} MozGfxMeta;
typedef struct
{
GstVideoBufferPoolClass parent_class;
} MozGfxBufferPoolClass;
typedef struct
{
GstVideoBufferPool pool;
} MozGfxBufferPool;
// working around GTK+ bug https://bugzilla.gnome.org/show_bug.cgi?id=723899
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-function"
G_DEFINE_TYPE(MozGfxMemoryAllocator, moz_gfx_memory_allocator, GST_TYPE_ALLOCATOR);
G_DEFINE_TYPE(MozGfxBufferPool, moz_gfx_buffer_pool, GST_TYPE_VIDEO_BUFFER_POOL);
#pragma GCC diagnostic pop
void
moz_gfx_memory_reset(MozGfxMemory *mem)
{
if (mem->image)
mem->image->Release();
ImageContainer* container = ((MozGfxMemoryAllocator*) mem->memory.allocator)->reader->GetImageContainer();
mem->image = container->CreatePlanarYCbCrImage().forget().take();
mem->data = mem->image->AllocateAndGetNewBuffer(mem->memory.size);
}
static GstMemory*
moz_gfx_memory_allocator_alloc(GstAllocator* aAllocator, gsize aSize,
GstAllocationParams* aParams)
{
MozGfxMemory* mem = g_slice_new (MozGfxMemory);
gsize maxsize = aSize + aParams->prefix + aParams->padding;
gst_memory_init(GST_MEMORY_CAST (mem),
(GstMemoryFlags)aParams->flags,
aAllocator, NULL, maxsize, aParams->align,
aParams->prefix, aSize);
mem->image = NULL;
moz_gfx_memory_reset(mem);
return (GstMemory *) mem;
}
static void
moz_gfx_memory_allocator_free (GstAllocator * allocator, GstMemory * gmem)
{
MozGfxMemory *mem = (MozGfxMemory *) gmem;
if (mem->memory.parent)
goto sub_mem;
if (mem->image)
mem->image->Release();
sub_mem:
g_slice_free (MozGfxMemory, mem);
}
static gpointer
moz_gfx_memory_map (MozGfxMemory * mem, gsize maxsize, GstMapFlags flags)
{
// check that the allocation didn't fail
if (mem->data == nullptr)
return nullptr;
return mem->data + mem->memory.offset;
}
static gboolean
moz_gfx_memory_unmap (MozGfxMemory * mem)
{
return TRUE;
}
static MozGfxMemory *
moz_gfx_memory_share (MozGfxMemory * mem, gssize offset, gsize size)
{
MozGfxMemory *sub;
GstMemory *parent;
/* find the real parent */
if ((parent = mem->memory.parent) == NULL)
parent = (GstMemory *) mem;
if (size == (gsize) -1)
size = mem->memory.size - offset;
/* the shared memory is always readonly */
sub = g_slice_new (MozGfxMemory);
gst_memory_init (GST_MEMORY_CAST (sub),
(GstMemoryFlags) (GST_MINI_OBJECT_FLAGS (parent) | GST_MINI_OBJECT_FLAG_LOCK_READONLY),
mem->memory.allocator, &mem->memory, mem->memory.maxsize, mem->memory.align,
mem->memory.offset + offset, size);
sub->image = mem->image;
sub->data = mem->data;
return sub;
}
static void
moz_gfx_memory_allocator_class_init (MozGfxMemoryAllocatorClass * klass)
{
GstAllocatorClass *allocator_class;
allocator_class = (GstAllocatorClass *) klass;
allocator_class->alloc = moz_gfx_memory_allocator_alloc;
allocator_class->free = moz_gfx_memory_allocator_free;
}
static void
moz_gfx_memory_allocator_init (MozGfxMemoryAllocator * allocator)
{
GstAllocator *alloc = GST_ALLOCATOR_CAST (allocator);
alloc->mem_type = "moz-gfx-image";
alloc->mem_map = (GstMemoryMapFunction) moz_gfx_memory_map;
alloc->mem_unmap = (GstMemoryUnmapFunction) moz_gfx_memory_unmap;
alloc->mem_share = (GstMemoryShareFunction) moz_gfx_memory_share;
/* fallback copy and is_span */
}
void
moz_gfx_memory_allocator_set_reader(GstAllocator* aAllocator, GStreamerReader* aReader)
{
MozGfxMemoryAllocator *allocator = (MozGfxMemoryAllocator *) aAllocator;
allocator->reader = aReader;
}
RefPtr<PlanarYCbCrImage>
moz_gfx_memory_get_image(GstMemory *aMemory)
{
NS_ASSERTION(GST_IS_MOZ_GFX_MEMORY_ALLOCATOR(aMemory->allocator), "Should be a gfx image");
return ((MozGfxMemory *) aMemory)->image;
}
void
moz_gfx_buffer_pool_reset_buffer (GstBufferPool* aPool, GstBuffer* aBuffer)
{
GstMemory* mem = gst_buffer_peek_memory(aBuffer, 0);
NS_ASSERTION(GST_IS_MOZ_GFX_MEMORY_ALLOCATOR(mem->allocator), "Should be a gfx image");
moz_gfx_memory_reset((MozGfxMemory *) mem);
GST_BUFFER_POOL_CLASS(moz_gfx_buffer_pool_parent_class)->reset_buffer(aPool, aBuffer);
}
static void
moz_gfx_buffer_pool_class_init (MozGfxBufferPoolClass * klass)
{
GstBufferPoolClass *pool_class = (GstBufferPoolClass *) klass;
pool_class->reset_buffer = moz_gfx_buffer_pool_reset_buffer;
}
static void
moz_gfx_buffer_pool_init (MozGfxBufferPool * pool)
{
}
} // namespace mozilla

Просмотреть файл

@ -1,25 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(GStreamerAllocator_h_)
#define GStreamerAllocator_h_
#include "GStreamerReader.h"
#define GST_TYPE_MOZ_GFX_MEMORY_ALLOCATOR (moz_gfx_memory_allocator_get_type())
#define GST_IS_MOZ_GFX_MEMORY_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MOZ_GFX_MEMORY_ALLOCATOR))
#define GST_TYPE_MOZ_GFX_BUFFER_POOL (moz_gfx_buffer_pool_get_type())
#define GST_IS_MOZ_GFX_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MOZ_GFX_BUFFER_POOL))
namespace mozilla {
GType moz_gfx_memory_allocator_get_type();
void moz_gfx_memory_allocator_set_reader(GstAllocator *aAllocator, GStreamerReader* aReader);
RefPtr<layers::PlanarYCbCrImage> moz_gfx_memory_get_image(GstMemory *aMemory);
GType moz_gfx_buffer_pool_get_type();
} // namespace mozilla
#endif

Просмотреть файл

@ -1,28 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "MediaDecoderStateMachine.h"
#include "GStreamerReader.h"
#include "GStreamerDecoder.h"
#include "GStreamerFormatHelper.h"
namespace mozilla {
MediaDecoderStateMachine* GStreamerDecoder::CreateStateMachine()
{
return new MediaDecoderStateMachine(this, new GStreamerReader(this));
}
bool
GStreamerDecoder::CanHandleMediaType(const nsACString& aMIMEType,
const nsAString* aCodecs)
{
return MediaDecoder::IsGStreamerEnabled() &&
GStreamerFormatHelper::Instance()->CanHandleMediaType(aMIMEType, aCodecs);
}
} // namespace mozilla

Просмотреть файл

@ -1,28 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(GStreamerDecoder_h_)
#define GStreamerDecoder_h_
#include "MediaDecoder.h"
#include "nsXPCOMStrings.h"
namespace mozilla {
class GStreamerDecoder : public MediaDecoder
{
public:
explicit GStreamerDecoder(MediaDecoderOwner* aOwner) : MediaDecoder(aOwner) {}
virtual MediaDecoder* Clone(MediaDecoderOwner* aOwner) {
return new GStreamerDecoder(aOwner);
}
virtual MediaDecoderStateMachine* CreateStateMachine();
static bool CanHandleMediaType(const nsACString& aMIMEType, const nsAString* aCodecs);
};
} // namespace mozilla
#endif

Просмотреть файл

@ -1,373 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "GStreamerFormatHelper.h"
#include "nsCharSeparatedTokenizer.h"
#include "nsString.h"
#include "GStreamerLoader.h"
#include "mozilla/Logging.h"
#include "mozilla/Preferences.h"
#define ENTRY_FORMAT(entry) entry[0]
#define ENTRY_CAPS(entry) entry[1]
namespace mozilla {
extern LazyLogModule gMediaDecoderLog;
#define LOG(msg, ...) \
MOZ_LOG(gMediaDecoderLog, LogLevel::Debug, ("GStreamerFormatHelper " msg, ##__VA_ARGS__))
GStreamerFormatHelper* GStreamerFormatHelper::gInstance = nullptr;
bool GStreamerFormatHelper::sLoadOK = false;
GStreamerFormatHelper* GStreamerFormatHelper::Instance() {
if (!gInstance) {
if ((sLoadOK = load_gstreamer())) {
gst_init(nullptr, nullptr);
}
gInstance = new GStreamerFormatHelper();
}
return gInstance;
}
void GStreamerFormatHelper::Shutdown() {
delete gInstance;
gInstance = nullptr;
}
static char const *const sContainers[][2] = {
{"video/mp4", "video/quicktime"},
{"video/x-m4v", "video/quicktime"},
{"video/quicktime", "video/quicktime"},
{"audio/mp4", "audio/x-m4a"},
{"audio/x-m4a", "audio/x-m4a"},
{"audio/mpeg", "audio/mpeg, mpegversion=(int)1"},
{"audio/mp3", "audio/mpeg, mpegversion=(int)1"},
};
static char const *const sCodecs[9][2] = {
{"avc1.42E01E", "video/x-h264"},
{"avc1.42001E", "video/x-h264"},
{"avc1.58A01E", "video/x-h264"},
{"avc1.4D401E", "video/x-h264"},
{"avc1.64001E", "video/x-h264"},
{"avc1.64001F", "video/x-h264"},
{"mp4v.20.3", "video/3gpp"},
{"mp4a.40.2", "audio/mpeg, mpegversion=(int)4"},
{"mp3", "audio/mpeg, mpegversion=(int)1"},
};
static char const * const sDefaultCodecCaps[][2] = {
{"video/mp4", "video/x-h264"},
{"video/x-m4v", "video/x-h264"},
{"video/quicktime", "video/x-h264"},
{"audio/mp4", "audio/mpeg, mpegversion=(int)4"},
{"audio/x-m4a", "audio/mpeg, mpegversion=(int)4"},
{"audio/mp3", "audio/mpeg, layer=(int)3"},
{"audio/mpeg", "audio/mpeg, layer=(int)3"}
};
static char const * const sPluginBlockList[] = {
"flump3dec",
"h264parse",
};
GStreamerFormatHelper::GStreamerFormatHelper()
: mFactories(nullptr),
mCookie(static_cast<uint32_t>(-1))
{
if (!sLoadOK) {
return;
}
mSupportedContainerCaps = gst_caps_new_empty();
for (unsigned int i = 0; i < G_N_ELEMENTS(sContainers); i++) {
const char* capsString = sContainers[i][1];
GstCaps* caps = gst_caps_from_string(capsString);
gst_caps_append(mSupportedContainerCaps, caps);
}
mSupportedCodecCaps = gst_caps_new_empty();
for (unsigned int i = 0; i < G_N_ELEMENTS(sCodecs); i++) {
const char* capsString = sCodecs[i][1];
GstCaps* caps = gst_caps_from_string(capsString);
gst_caps_append(mSupportedCodecCaps, caps);
}
}
GStreamerFormatHelper::~GStreamerFormatHelper() {
if (!sLoadOK) {
return;
}
gst_caps_unref(mSupportedContainerCaps);
gst_caps_unref(mSupportedCodecCaps);
if (mFactories)
g_list_free(mFactories);
}
static GstCaps *
GetContainerCapsFromMIMEType(const char *aType) {
/* convert aMIMEType to gst container caps */
const char* capsString = nullptr;
for (uint32_t i = 0; i < G_N_ELEMENTS(sContainers); i++) {
if (!strcmp(ENTRY_FORMAT(sContainers[i]), aType)) {
capsString = ENTRY_CAPS(sContainers[i]);
break;
}
}
if (!capsString) {
/* we couldn't find any matching caps */
return nullptr;
}
return gst_caps_from_string(capsString);
}
static GstCaps *
GetDefaultCapsFromMIMEType(const char *aType) {
GstCaps *caps = GetContainerCapsFromMIMEType(aType);
for (uint32_t i = 0; i < G_N_ELEMENTS(sDefaultCodecCaps); i++) {
if (!strcmp(sDefaultCodecCaps[i][0], aType)) {
GstCaps *tmp = gst_caps_from_string(sDefaultCodecCaps[i][1]);
gst_caps_append(caps, tmp);
return caps;
}
}
return nullptr;
}
bool GStreamerFormatHelper::CanHandleMediaType(const nsACString& aMIMEType,
const nsAString* aCodecs) {
if (!sLoadOK) {
return false;
}
const char *type;
NS_CStringGetData(aMIMEType, &type, nullptr);
GstCaps *caps;
if (aCodecs && !aCodecs->IsEmpty()) {
caps = ConvertFormatsToCaps(type, aCodecs);
} else {
// Get a minimal set of codec caps for this MIME type we should support so
// that we don't overreport MIME types we are able to play.
caps = GetDefaultCapsFromMIMEType(type);
}
if (!caps) {
return false;
}
bool ret = HaveElementsToProcessCaps(caps);
gst_caps_unref(caps);
return ret;
}
GstCaps* GStreamerFormatHelper::ConvertFormatsToCaps(const char* aMIMEType,
const nsAString* aCodecs) {
NS_ASSERTION(sLoadOK, "GStreamer library not linked");
unsigned int i;
GstCaps *caps = GetContainerCapsFromMIMEType(aMIMEType);
if (!caps) {
return nullptr;
}
/* container only */
if (!aCodecs) {
return caps;
}
nsCharSeparatedTokenizer tokenizer(*aCodecs, ',');
while (tokenizer.hasMoreTokens()) {
const nsSubstring& codec = tokenizer.nextToken();
const char *capsString = nullptr;
for (i = 0; i < G_N_ELEMENTS(sCodecs); i++) {
if (codec.EqualsASCII(ENTRY_FORMAT(sCodecs[i]))) {
capsString = ENTRY_CAPS(sCodecs[i]);
break;
}
}
if (!capsString) {
gst_caps_unref(caps);
return nullptr;
}
GstCaps* tmp = gst_caps_from_string(capsString);
/* appends and frees tmp */
gst_caps_append(caps, tmp);
}
return caps;
}
/* static */ bool
GStreamerFormatHelper::IsBlockListEnabled()
{
static bool sBlockListEnabled;
static bool sBlockListEnabledCached = false;
if (!sBlockListEnabledCached) {
Preferences::AddBoolVarCache(&sBlockListEnabled,
"media.gstreamer.enable-blacklist", true);
sBlockListEnabledCached = true;
}
return sBlockListEnabled;
}
/* static */ bool
GStreamerFormatHelper::IsPluginFeatureBlocked(GstPluginFeature *aFeature)
{
if (!IsBlockListEnabled()) {
return false;
}
const gchar *factoryName =
gst_plugin_feature_get_name(aFeature);
for (unsigned int i = 0; i < G_N_ELEMENTS(sPluginBlockList); i++) {
if (!strcmp(factoryName, sPluginBlockList[i])) {
LOG("rejecting disabled plugin %s", factoryName);
return true;
}
}
return false;
}
static gboolean FactoryFilter(GstPluginFeature *aFeature, gpointer)
{
if (!GST_IS_ELEMENT_FACTORY(aFeature)) {
return FALSE;
}
const gchar *className =
gst_element_factory_get_klass(GST_ELEMENT_FACTORY_CAST(aFeature));
// NB: We skip filtering parsers here, because adding them to
// the list can give false decoder positives to canPlayType().
if (!strstr(className, "Decoder") && !strstr(className, "Demux")) {
return FALSE;
}
return
gst_plugin_feature_get_rank(aFeature) >= GST_RANK_MARGINAL &&
!GStreamerFormatHelper::IsPluginFeatureBlocked(aFeature);
}
/**
* Returns true if any |aFactory| caps intersect with |aCaps|
*/
static bool SupportsCaps(GstElementFactory *aFactory, GstCaps *aCaps)
{
for (const GList *iter = gst_element_factory_get_static_pad_templates(aFactory); iter; iter = iter->next) {
GstStaticPadTemplate *templ = static_cast<GstStaticPadTemplate *>(iter->data);
if (templ->direction == GST_PAD_SRC) {
continue;
}
GstCaps *caps = gst_static_caps_get(&templ->static_caps);
if (!caps) {
continue;
}
bool supported = gst_caps_can_intersect(caps, aCaps);
gst_caps_unref(caps);
if (supported) {
return true;
}
}
return false;
}
bool GStreamerFormatHelper::HaveElementsToProcessCaps(GstCaps* aCaps) {
NS_ASSERTION(sLoadOK, "GStreamer library not linked");
GList* factories = GetFactories();
/* here aCaps contains [containerCaps, [codecCaps1, [codecCaps2, ...]]] so process
* caps structures individually as we want one element for _each_
* structure */
for (unsigned int i = 0; i < gst_caps_get_size(aCaps); i++) {
GstStructure* s = gst_caps_get_structure(aCaps, i);
GstCaps* caps = gst_caps_new_full(gst_structure_copy(s), nullptr);
bool found = false;
for (GList *elem = factories; elem; elem = elem->next) {
if (SupportsCaps(GST_ELEMENT_FACTORY_CAST(elem->data), caps)) {
found = true;
break;
}
}
gst_caps_unref(caps);
if (!found) {
return false;
}
}
return true;
}
bool GStreamerFormatHelper::CanHandleContainerCaps(GstCaps* aCaps)
{
NS_ASSERTION(sLoadOK, "GStreamer library not linked");
return gst_caps_can_intersect(aCaps, mSupportedContainerCaps);
}
bool GStreamerFormatHelper::CanHandleCodecCaps(GstCaps* aCaps)
{
NS_ASSERTION(sLoadOK, "GStreamer library not linked");
return gst_caps_can_intersect(aCaps, mSupportedCodecCaps);
}
GList* GStreamerFormatHelper::GetFactories() {
NS_ASSERTION(sLoadOK, "GStreamer library not linked");
#if GST_VERSION_MAJOR >= 1
uint32_t cookie = gst_registry_get_feature_list_cookie(gst_registry_get());
#else
uint32_t cookie = gst_default_registry_get_feature_list_cookie();
#endif
if (cookie != mCookie) {
g_list_free(mFactories);
#if GST_VERSION_MAJOR >= 1
mFactories =
gst_registry_feature_filter(gst_registry_get(),
(GstPluginFeatureFilter)FactoryFilter,
false, nullptr);
#else
mFactories =
gst_default_registry_feature_filter((GstPluginFeatureFilter)FactoryFilter,
false, nullptr);
#endif
mCookie = cookie;
}
return mFactories;
}
} // namespace mozilla

Просмотреть файл

@ -1,81 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(GStreamerFormatHelper_h_)
#define GStreamerFormatHelper_h_
#include <gst/gst.h>
#include <mozilla/Types.h>
#include "nsXPCOMStrings.h"
namespace mozilla {
class GStreamerFormatHelper {
/* This class can be used to query the GStreamer registry for the required
* demuxers/decoders from nsHTMLMediaElement::CanPlayType.
* It implements looking at the GstRegistry to check if elements to
* demux/decode the formats passed to CanPlayType() are actually installed.
*/
public:
static GStreamerFormatHelper* Instance();
~GStreamerFormatHelper();
bool CanHandleMediaType(const nsACString& aMIMEType,
const nsAString* aCodecs);
bool CanHandleContainerCaps(GstCaps* aCaps);
bool CanHandleCodecCaps(GstCaps* aCaps);
static bool IsBlockListEnabled();
static bool IsPluginFeatureBlocked(GstPluginFeature *aFeature);
static GstCaps* ConvertFormatsToCaps(const char* aMIMEType,
const nsAString* aCodecs);
static void Shutdown();
private:
GStreamerFormatHelper();
char* const *CodecListFromCaps(GstCaps* aCaps);
bool HaveElementsToProcessCaps(GstCaps* aCaps);
GList* GetFactories();
static GStreamerFormatHelper* gInstance;
/* table to convert from container MIME types to GStreamer caps */
static char const *const mContainers[6][2];
/* table to convert from codec MIME types to GStreamer caps */
static char const *const mCodecs[9][2];
/*
* True iff we were able to find the proper GStreamer libs and the functions
* we need.
*/
static bool sLoadOK;
/* whitelist of supported container/codec gst caps */
GstCaps* mSupportedContainerCaps;
GstCaps* mSupportedCodecCaps;
/* list of GStreamer element factories
* Element factories are the basic types retrieved from the GStreamer
* registry, they describe all plugins and elements that GStreamer can
* create.
* This means that element factories are useful for automated element
* instancing, such as what autopluggers do,
* and for creating lists of available elements. */
GList* mFactories;
/* Storage for the default registrys feature list cookie.
* It changes every time a feature is added to or removed from the
* GStreamer registry. */
uint32_t mCookie;
};
} //namespace mozilla
#endif

Просмотреть файл

@ -1,173 +0,0 @@
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef __APPLE__
/*
* List of symbol names we need to dlsym from the gstreamer library.
*/
GST_FUNC(LIBGSTAPP, gst_app_sink_get_type)
GST_FUNC(LIBGSTAPP, gst_app_sink_set_callbacks)
GST_FUNC(LIBGSTAPP, gst_app_src_end_of_stream)
GST_FUNC(LIBGSTAPP, gst_app_src_get_size)
GST_FUNC(LIBGSTAPP, gst_app_src_get_type)
GST_FUNC(LIBGSTAPP, gst_app_src_push_buffer)
GST_FUNC(LIBGSTAPP, gst_app_src_set_callbacks)
GST_FUNC(LIBGSTAPP, gst_app_src_set_caps)
GST_FUNC(LIBGSTAPP, gst_app_src_set_size)
GST_FUNC(LIBGSTAPP, gst_app_src_set_stream_type)
GST_FUNC(LIBGSTREAMER, gst_bin_get_by_name)
GST_FUNC(LIBGSTREAMER, gst_bin_get_type)
GST_FUNC(LIBGSTREAMER, gst_bin_iterate_recurse)
GST_FUNC(LIBGSTREAMER, gst_buffer_get_type)
GST_FUNC(LIBGSTREAMER, gst_buffer_new)
GST_FUNC(LIBGSTREAMER, gst_bus_set_sync_handler)
GST_FUNC(LIBGSTREAMER, gst_bus_timed_pop_filtered)
GST_FUNC(LIBGSTREAMER, gst_caps_append)
GST_FUNC(LIBGSTREAMER, gst_caps_can_intersect)
GST_FUNC(LIBGSTREAMER, gst_caps_from_string)
GST_FUNC(LIBGSTREAMER, gst_caps_get_size)
GST_FUNC(LIBGSTREAMER, gst_caps_get_structure)
GST_FUNC(LIBGSTREAMER, gst_caps_new_any)
GST_FUNC(LIBGSTREAMER, gst_caps_new_empty)
GST_FUNC(LIBGSTREAMER, gst_caps_new_full)
GST_FUNC(LIBGSTREAMER, gst_caps_new_simple)
GST_FUNC(LIBGSTREAMER, gst_caps_set_simple)
GST_FUNC(LIBGSTREAMER, gst_element_factory_get_static_pad_templates)
GST_FUNC(LIBGSTREAMER, gst_element_factory_get_type)
GST_FUNC(LIBGSTREAMER, gst_element_factory_make)
GST_FUNC(LIBGSTREAMER, gst_element_get_factory)
GST_FUNC(LIBGSTREAMER, gst_element_get_static_pad)
GST_FUNC(LIBGSTREAMER, gst_element_get_type)
GST_FUNC(LIBGSTREAMER, gst_element_query_convert)
GST_FUNC(LIBGSTREAMER, gst_element_query_duration)
GST_FUNC(LIBGSTREAMER, gst_element_seek_simple)
GST_FUNC(LIBGSTREAMER, gst_element_set_state)
GST_FUNC(LIBGSTREAMER, gst_flow_get_name)
GST_FUNC(LIBGSTREAMER, gst_init)
GST_FUNC(LIBGSTREAMER, gst_init_check)
GST_FUNC(LIBGSTREAMER, gst_iterator_free)
GST_FUNC(LIBGSTREAMER, gst_iterator_next)
GST_FUNC(LIBGSTREAMER, gst_message_parse_error)
GST_FUNC(LIBGSTREAMER, gst_message_type_get_name)
GST_FUNC(LIBGSTREAMER, gst_mini_object_ref)
GST_FUNC(LIBGSTREAMER, gst_mini_object_unref)
GST_FUNC(LIBGSTREAMER, gst_object_get_name)
GST_FUNC(LIBGSTREAMER, gst_object_get_parent)
GST_FUNC(LIBGSTREAMER, gst_object_unref)
GST_FUNC(LIBGSTREAMER, gst_pad_get_element_private)
GST_FUNC(LIBGSTREAMER, gst_pad_set_element_private)
GST_FUNC(LIBGSTREAMER, gst_parse_bin_from_description)
GST_FUNC(LIBGSTREAMER, gst_pipeline_get_bus)
GST_FUNC(LIBGSTREAMER, gst_pipeline_get_type)
GST_FUNC(LIBGSTREAMER, gst_plugin_feature_get_rank)
GST_FUNC(LIBGSTREAMER, gst_plugin_feature_get_type)
GST_FUNC(LIBGSTREAMER, gst_registry_feature_filter)
GST_FUNC(LIBGSTREAMER, gst_registry_get_feature_list_cookie)
GST_FUNC(LIBGSTREAMER, gst_segment_init)
GST_FUNC(LIBGSTREAMER, gst_segment_to_stream_time)
GST_FUNC(LIBGSTREAMER, gst_static_caps_get)
GST_FUNC(LIBGSTREAMER, gst_structure_copy)
GST_FUNC(LIBGSTREAMER, gst_structure_get_fraction)
GST_FUNC(LIBGSTREAMER, gst_structure_get_int)
GST_FUNC(LIBGSTREAMER, gst_structure_get_value)
GST_FUNC(LIBGSTREAMER, gst_structure_new)
GST_FUNC(LIBGSTREAMER, gst_util_uint64_scale)
#if GST_VERSION_MAJOR == 0
GST_FUNC(LIBGSTAPP, gst_app_sink_pull_buffer)
GST_FUNC(LIBGSTREAMER, gst_buffer_copy_metadata)
GST_FUNC(LIBGSTREAMER, gst_buffer_new_and_alloc)
GST_FUNC(LIBGSTREAMER, gst_caps_unref)
GST_FUNC(LIBGSTREAMER, gst_element_factory_get_klass)
GST_FUNC(LIBGSTREAMER, gst_element_get_pad)
GST_FUNC(LIBGSTREAMER, gst_event_parse_new_segment)
GST_FUNC(LIBGSTREAMER, gst_mini_object_get_type)
GST_FUNC(LIBGSTREAMER, gst_mini_object_new)
GST_FUNC(LIBGSTREAMER, gst_pad_add_event_probe)
GST_FUNC(LIBGSTREAMER, gst_pad_alloc_buffer)
GST_FUNC(LIBGSTREAMER, gst_pad_get_negotiated_caps)
GST_FUNC(LIBGSTREAMER, gst_pad_set_bufferalloc_function)
GST_FUNC(LIBGSTREAMER, gst_plugin_feature_get_name)
GST_FUNC(LIBGSTREAMER, gst_registry_get_default)
GST_FUNC(LIBGSTREAMER, gst_segment_set_newsegment)
GST_FUNC(LIBGSTVIDEO, gst_video_format_get_component_height)
GST_FUNC(LIBGSTVIDEO, gst_video_format_get_component_offset)
GST_FUNC(LIBGSTVIDEO, gst_video_format_get_component_width)
GST_FUNC(LIBGSTVIDEO, gst_video_format_get_pixel_stride)
GST_FUNC(LIBGSTVIDEO, gst_video_format_get_row_stride)
GST_FUNC(LIBGSTVIDEO, gst_video_format_parse_caps)
GST_FUNC(LIBGSTVIDEO, gst_video_parse_caps_pixel_aspect_ratio)
#else
GST_FUNC(LIBGSTAPP, gst_app_sink_pull_sample)
GST_FUNC(LIBGSTREAMER, _gst_caps_any)
GST_FUNC(LIBGSTREAMER, gst_allocator_get_type)
GST_FUNC(LIBGSTREAMER, gst_buffer_copy_into)
GST_FUNC(LIBGSTREAMER, gst_buffer_extract)
GST_FUNC(LIBGSTREAMER, gst_buffer_get_meta)
GST_FUNC(LIBGSTREAMER, gst_buffer_get_size)
GST_FUNC(LIBGSTREAMER, gst_buffer_map)
GST_FUNC(LIBGSTREAMER, gst_buffer_new_allocate)
GST_FUNC(LIBGSTREAMER, gst_buffer_n_memory)
GST_FUNC(LIBGSTREAMER, gst_buffer_peek_memory)
GST_FUNC(LIBGSTREAMER, gst_buffer_pool_acquire_buffer)
GST_FUNC(LIBGSTREAMER, gst_buffer_pool_config_set_allocator)
GST_FUNC(LIBGSTREAMER, gst_buffer_pool_config_set_params)
GST_FUNC(LIBGSTREAMER, gst_buffer_pool_get_config)
GST_FUNC(LIBGSTREAMER, gst_buffer_pool_get_type)
GST_FUNC(LIBGSTREAMER, gst_buffer_pool_is_active)
GST_FUNC(LIBGSTREAMER, gst_buffer_pool_set_active)
GST_FUNC(LIBGSTREAMER, gst_buffer_pool_set_config)
GST_FUNC(LIBGSTREAMER, gst_buffer_set_size)
GST_FUNC(LIBGSTREAMER, gst_buffer_unmap)
GST_FUNC(LIBGSTREAMER, gst_element_factory_get_metadata)
GST_FUNC(LIBGSTREAMER, gst_event_parse_segment)
GST_FUNC(LIBGSTREAMER, gst_event_type_get_name)
GST_FUNC(LIBGSTREAMER, gst_memory_init)
GST_FUNC(LIBGSTREAMER, gst_memory_map)
GST_FUNC(LIBGSTREAMER, gst_memory_unmap)
GST_FUNC(LIBGSTREAMER, gst_object_get_type)
GST_FUNC(LIBGSTREAMER, gst_pad_add_probe)
GST_FUNC(LIBGSTREAMER, gst_pad_get_current_caps)
GST_FUNC(LIBGSTREAMER, gst_pad_probe_info_get_query)
GST_FUNC(LIBGSTREAMER, gst_query_add_allocation_meta)
GST_FUNC(LIBGSTREAMER, gst_query_add_allocation_param)
GST_FUNC(LIBGSTREAMER, gst_query_add_allocation_pool)
GST_FUNC(LIBGSTREAMER, gst_query_parse_allocation)
GST_FUNC(LIBGSTREAMER, gst_registry_get)
GST_FUNC(LIBGSTREAMER, gst_sample_get_buffer)
GST_FUNC(LIBGSTREAMER, gst_segment_copy_into)
GST_FUNC(LIBGSTREAMER, gst_structure_free)
GST_FUNC(LIBGSTVIDEO, gst_buffer_pool_config_get_video_alignment)
GST_FUNC(LIBGSTVIDEO, gst_buffer_pool_has_option)
GST_FUNC(LIBGSTVIDEO, gst_video_buffer_pool_get_type)
GST_FUNC(LIBGSTVIDEO, gst_video_frame_map)
GST_FUNC(LIBGSTVIDEO, gst_video_frame_unmap)
GST_FUNC(LIBGSTVIDEO, gst_video_info_align)
GST_FUNC(LIBGSTVIDEO, gst_video_info_from_caps)
GST_FUNC(LIBGSTVIDEO, gst_video_info_init)
GST_FUNC(LIBGSTVIDEO, gst_video_meta_api_get_type)
GST_FUNC(LIBGSTVIDEO, gst_video_meta_map)
GST_FUNC(LIBGSTVIDEO, gst_video_meta_unmap)
#endif
/*
* Functions that have been defined in the header file. We replace them so that
* they don't try to use the global gstreamer functions.
*/
#ifdef REPLACE_FUNC
REPLACE_FUNC(gst_buffer_ref);
REPLACE_FUNC(gst_buffer_unref);
REPLACE_FUNC(gst_message_unref);
#if GST_VERSION_MAJOR == 1
REPLACE_FUNC(gst_caps_unref);
REPLACE_FUNC(gst_sample_unref);
#endif
#endif
#endif // !defined(__APPLE__)

Просмотреть файл

@ -1,145 +0,0 @@
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <dlfcn.h>
#include <stdio.h>
#include "nsDebug.h"
#include "GStreamerLoader.h"
#define LIBGSTREAMER 0
#define LIBGSTAPP 1
#define LIBGSTVIDEO 2
#ifdef __OpenBSD__
#define LIB_GST_SUFFIX ".so"
#else
#define LIB_GST_SUFFIX ".so.0"
#endif
namespace mozilla {
/*
* Declare our function pointers using the types from the global gstreamer
* definitions.
*/
#define GST_FUNC(_, func) typeof(::func)* func;
#define REPLACE_FUNC(func) GST_FUNC(-1, func)
#include "GStreamerFunctionList.h"
#undef GST_FUNC
#undef REPLACE_FUNC
/*
* Redefinitions of functions that have been defined in the gstreamer headers to
* stop them calling the gstreamer functions in global scope.
*/
GstBuffer * gst_buffer_ref_impl(GstBuffer *buf);
void gst_buffer_unref_impl(GstBuffer *buf);
void gst_message_unref_impl(GstMessage *msg);
void gst_caps_unref_impl(GstCaps *caps);
#if GST_VERSION_MAJOR == 1
void gst_sample_unref_impl(GstSample *sample);
#endif
bool
load_gstreamer()
{
#ifdef __APPLE__
return true;
#endif
static bool loaded = false;
if (loaded) {
return true;
}
void *gstreamerLib = nullptr;
guint major = 0;
guint minor = 0;
guint micro, nano;
typedef typeof(::gst_version) VersionFuncType;
if (VersionFuncType *versionFunc = (VersionFuncType*)dlsym(RTLD_DEFAULT, "gst_version")) {
versionFunc(&major, &minor, &micro, &nano);
}
if (major == GST_VERSION_MAJOR && minor == GST_VERSION_MINOR) {
gstreamerLib = RTLD_DEFAULT;
} else {
gstreamerLib = dlopen("libgstreamer-" GST_API_VERSION LIB_GST_SUFFIX, RTLD_NOW | RTLD_LOCAL);
}
void *handles[3] = {
gstreamerLib,
dlopen("libgstapp-" GST_API_VERSION LIB_GST_SUFFIX, RTLD_NOW | RTLD_LOCAL),
dlopen("libgstvideo-" GST_API_VERSION LIB_GST_SUFFIX, RTLD_NOW | RTLD_LOCAL)
};
for (size_t i = 0; i < sizeof(handles) / sizeof(handles[0]); i++) {
if (!handles[i]) {
NS_WARNING("Couldn't link gstreamer libraries");
goto fail;
}
}
#define GST_FUNC(lib, symbol) \
if (!(symbol = (typeof(symbol))dlsym(handles[lib], #symbol))) { \
NS_WARNING("Couldn't link symbol " #symbol); \
goto fail; \
}
#define REPLACE_FUNC(symbol) symbol = symbol##_impl;
#include "GStreamerFunctionList.h"
#undef GST_FUNC
#undef REPLACE_FUNC
loaded = true;
return true;
fail:
for (size_t i = 0; i < sizeof(handles) / sizeof(handles[0]); i++) {
if (handles[i] && handles[i] != RTLD_DEFAULT) {
dlclose(handles[i]);
}
}
return false;
}
GstBuffer *
gst_buffer_ref_impl(GstBuffer *buf)
{
return (GstBuffer *)gst_mini_object_ref(GST_MINI_OBJECT_CAST(buf));
}
void
gst_buffer_unref_impl(GstBuffer *buf)
{
gst_mini_object_unref(GST_MINI_OBJECT_CAST(buf));
}
void
gst_message_unref_impl(GstMessage *msg)
{
gst_mini_object_unref(GST_MINI_OBJECT_CAST(msg));
}
#if GST_VERSION_MAJOR == 1
void
gst_sample_unref_impl(GstSample *sample)
{
gst_mini_object_unref(GST_MINI_OBJECT_CAST(sample));
}
#endif
void
gst_caps_unref_impl(GstCaps *caps)
{
gst_mini_object_unref(GST_MINI_OBJECT_CAST(caps));
}
}

Просмотреть файл

@ -1,53 +0,0 @@
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef GStreamerLoader_h_
#define GStreamerLoader_h_

#include <gst/gst.h>
#include <gst/gstbuffer.h>
#include <gst/gstelementfactory.h>
#include <gst/gststructure.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>

// This include trips -Wreserved-user-defined-literal on clang. Ignoring it
// trips -Wpragmas on GCC (unknown warning), but ignoring that trips
// -Wunknown-pragmas on clang (unknown pragma).
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunknown-pragmas"
#pragma GCC diagnostic ignored "-Wpragmas"
#pragma GCC diagnostic ignored "-Wreserved-user-defined-literal"
#include <gst/video/video.h>
#pragma GCC diagnostic pop

#if GST_VERSION_MAJOR == 1
#include <gst/video/gstvideometa.h>
#include <gst/video/gstvideopool.h>
#endif

namespace mozilla {

/*
 * dlopens the required libraries and dlsyms the functions we need.
 * Returns true on success, false otherwise.
 */
bool load_gstreamer();

/*
 * Declare our extern function pointers using the types from the global
 * gstreamer definitions.
 */
// GST_FUNC's first argument (a library index used by the loader .cpp) is
// irrelevant for declarations, so REPLACE_FUNC passes a dummy -1.
#define GST_FUNC(_, func) extern typeof(::func)* func;
#define REPLACE_FUNC(func) GST_FUNC(-1, func)
#include "GStreamerFunctionList.h"
#undef GST_FUNC
#undef REPLACE_FUNC

}

// GST_CAPS_ANY normally names a global caps object inside libgstreamer;
// redirect it through the dlsym'd pointer so it works when the library is
// loaded at runtime.
#undef GST_CAPS_ANY
#define GST_CAPS_ANY (*_gst_caps_any)

#endif // GStreamerLoader_h_

Просмотреть файл

@ -1,123 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <string.h>
#include "GStreamerReader.h"
#include "GStreamerMozVideoBuffer.h"
#include "ImageContainer.h"
namespace mozilla {

static GstMozVideoBuffer *gst_moz_video_buffer_copy(GstMozVideoBuffer* self);
static void gst_moz_video_buffer_finalize(GstMozVideoBuffer* self);

// working around GTK+ bug https://bugzilla.gnome.org/show_bug.cgi?id=723899
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-function"
G_DEFINE_TYPE(GstMozVideoBuffer, gst_moz_video_buffer, GST_TYPE_BUFFER);
#pragma GCC diagnostic pop

// Install our copy/finalize vfuncs on the GStreamer 0.10 mini-object class.
static void
gst_moz_video_buffer_class_init(GstMozVideoBufferClass* klass)
{
  g_return_if_fail(GST_IS_MOZ_VIDEO_BUFFER_CLASS(klass));

  GstMiniObjectClass *mo_class = GST_MINI_OBJECT_CLASS(klass);

  mo_class->copy =(GstMiniObjectCopyFunction)gst_moz_video_buffer_copy;
  mo_class->finalize =(GstMiniObjectFinalizeFunction)gst_moz_video_buffer_finalize;
}

// Instance init: nothing to do beyond the type check; |data| is set later
// via gst_moz_video_buffer_set_data().
static void
gst_moz_video_buffer_init(GstMozVideoBuffer* self)
{
  g_return_if_fail(GST_IS_MOZ_VIDEO_BUFFER(self));
}

// Mini-object finalize vfunc: release the boxed image holder, then chain up
// so the parent GstBuffer finalizer runs.
static void
gst_moz_video_buffer_finalize(GstMozVideoBuffer* self)
{
  g_return_if_fail(GST_IS_MOZ_VIDEO_BUFFER(self));

  if(self->data)
    g_boxed_free(GST_TYPE_MOZ_VIDEO_BUFFER_DATA, self->data);

  GST_MINI_OBJECT_CLASS(gst_moz_video_buffer_parent_class)->finalize(GST_MINI_OBJECT(self));
}

// Mini-object copy vfunc: deep-copies the pixel data into malloc'd memory
// and boxed-copies the image holder.
static GstMozVideoBuffer*
gst_moz_video_buffer_copy(GstMozVideoBuffer* self)
{
  GstMozVideoBuffer* copy;

  g_return_val_if_fail(GST_IS_MOZ_VIDEO_BUFFER(self), nullptr);

  copy = gst_moz_video_buffer_new();

  /* we simply copy everything from our parent */
  GST_BUFFER_DATA(GST_BUFFER_CAST(copy)) =
    (guint8*)g_memdup(GST_BUFFER_DATA(GST_BUFFER_CAST(self)), GST_BUFFER_SIZE(GST_BUFFER_CAST(self)));

  /* make sure it gets freed(even if the parent is subclassed, we return a
     normal buffer) */
  GST_BUFFER_MALLOCDATA(GST_BUFFER_CAST(copy)) = GST_BUFFER_DATA(GST_BUFFER_CAST(copy));
  GST_BUFFER_SIZE(GST_BUFFER_CAST(copy)) = GST_BUFFER_SIZE(GST_BUFFER_CAST(self));

  /* copy metadata */
  gst_buffer_copy_metadata(GST_BUFFER_CAST(copy),
                           GST_BUFFER_CAST(self),
                           (GstBufferCopyFlags)GST_BUFFER_COPY_ALL);
  /* copy videobuffer */
  if(self->data)
    copy->data = (GstMozVideoBufferData*)g_boxed_copy(GST_TYPE_MOZ_VIDEO_BUFFER_DATA, self->data);

  return copy;
}

// Allocate a new, empty GstMozVideoBuffer (caller owns the returned ref).
GstMozVideoBuffer*
gst_moz_video_buffer_new(void)
{
  GstMozVideoBuffer *self;

  self =(GstMozVideoBuffer*)gst_mini_object_new(GST_TYPE_MOZ_VIDEO_BUFFER);
  self->data = nullptr;

  return self;
}

// Attach the boxed image holder; the buffer takes ownership and frees it in
// finalize.
void
gst_moz_video_buffer_set_data(GstMozVideoBuffer* self, GstMozVideoBufferData* data)
{
  g_return_if_fail(GST_IS_MOZ_VIDEO_BUFFER(self));

  self->data = data;
}

// Borrow the attached image holder (may be null); ownership stays with the
// buffer.
GstMozVideoBufferData*
gst_moz_video_buffer_get_data(const GstMozVideoBuffer* self)
{
  g_return_val_if_fail(GST_IS_MOZ_VIDEO_BUFFER(self), nullptr);

  return self->data;
}

// Thread-safe, lazy registration of the GBoxed type that wraps
// GstMozVideoBufferData (standard g_once_init boxed-type pattern).
GType
gst_moz_video_buffer_data_get_type(void)
{
  static volatile gsize g_define_type_id__volatile = 0;

  if(g_once_init_enter(&g_define_type_id__volatile)) {
    GType g_define_type_id =
      g_boxed_type_register_static(g_intern_static_string("GstMozVideoBufferData"),
                                   (GBoxedCopyFunc)GstMozVideoBufferData::Copy,
                                   (GBoxedFreeFunc)GstMozVideoBufferData::Free);
    g_once_init_leave(&g_define_type_id__volatile, g_define_type_id);
  }

  return g_define_type_id__volatile;
}

}

Просмотреть файл

@ -1,61 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef __GST_MOZ_VIDEO_BUFFER_H__
#define __GST_MOZ_VIDEO_BUFFER_H__

#include <gst/gst.h>
#include "GStreamerLoader.h"
#include "MediaDecoderReader.h"

namespace mozilla {

#define GST_TYPE_MOZ_VIDEO_BUFFER_DATA (gst_moz_video_buffer_data_get_type())
#define GST_IS_MOZ_VIDEO_BUFFER_DATA(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MOZ_VIDEO_BUFFER_DATA))

#define GST_TYPE_MOZ_VIDEO_BUFFER (gst_moz_video_buffer_get_type())
#define GST_IS_MOZ_VIDEO_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MOZ_VIDEO_BUFFER))
#define GST_IS_MOZ_VIDEO_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_MOZ_VIDEO_BUFFER))
#define GST_MOZ_VIDEO_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_MOZ_VIDEO_BUFFER, GstMozVideoBuffer))

typedef struct _GstMozVideoBuffer GstMozVideoBuffer;
typedef struct _GstMozVideoBufferClass GstMozVideoBufferClass;

class GstMozVideoBufferData;

// GstBuffer subclass (GStreamer 0.10 mini-object) that keeps alive the
// gfx image backing its pixel data.
struct _GstMozVideoBuffer {
  GstBuffer buffer;
  GstMozVideoBufferData* data;
};

struct _GstMozVideoBufferClass {
  GstBufferClass buffer_class;
};

GType gst_moz_video_buffer_get_type(void);
GstMozVideoBuffer* gst_moz_video_buffer_new(void);
void gst_moz_video_buffer_set_data(GstMozVideoBuffer* buf, GstMozVideoBufferData* data);
GstMozVideoBufferData* gst_moz_video_buffer_get_data(const GstMozVideoBuffer* buf);

// Boxed holder for a PlanarYCbCrImage reference; copied/freed through the
// GBoxed machinery registered by gst_moz_video_buffer_data_get_type().
class GstMozVideoBufferData {
public:
  explicit GstMozVideoBufferData(layers::PlanarYCbCrImage* aImage) : mImage(aImage) {}

  // GBoxedCopyFunc: the clone shares the underlying image reference.
  static void* Copy(void* aData) {
    return new GstMozVideoBufferData(reinterpret_cast<GstMozVideoBufferData*>(aData)->mImage);
  }

  // GBoxedFreeFunc: drops this holder's reference to the image.
  static void Free(void* aData) {
    delete reinterpret_cast<GstMozVideoBufferData*>(aData);
  }

  RefPtr<layers::PlanarYCbCrImage> mImage;
};

GType gst_moz_video_buffer_data_get_type (void);

} // namespace mozilla

#endif /* __GST_MOZ_VIDEO_BUFFER_H__ */

Просмотреть файл

@ -1,203 +0,0 @@
#include "nsError.h"
#include "MediaDecoderStateMachine.h"
#include "AbstractMediaDecoder.h"
#include "MediaResource.h"
#include "GStreamerReader.h"
#include "GStreamerMozVideoBuffer.h"
#include "GStreamerFormatHelper.h"
#include "VideoUtils.h"
#include "mozilla/Endian.h"
#include "mozilla/Preferences.h"
using namespace mozilla;
using mozilla::layers::PlanarYCbCrImage;
using mozilla::layers::ImageContainer;
// Static trampoline for the pad's bufferalloc function: recover the reader
// instance stashed in the pad's element-private slot and forward.
GstFlowReturn GStreamerReader::AllocateVideoBufferCb(GstPad* aPad,
                                                     guint64 aOffset,
                                                     guint aSize,
                                                     GstCaps* aCaps,
                                                     GstBuffer** aBuf)
{
  void* priv = gst_pad_get_element_private(aPad);
  GStreamerReader* self = reinterpret_cast<GStreamerReader*>(priv);
  return self->AllocateVideoBuffer(aPad, aOffset, aSize, aCaps, aBuf);
}
// Convenience wrapper over AllocateVideoBufferFull for callers that don't
// need the allocated image back.
GstFlowReturn GStreamerReader::AllocateVideoBuffer(GstPad* aPad,
                                                   guint64 aOffset,
                                                   guint aSize,
                                                   GstCaps* aCaps,
                                                   GstBuffer** aBuf)
{
  RefPtr<PlanarYCbCrImage> unusedImage;
  return AllocateVideoBufferFull(aPad, aOffset, aSize, aCaps, aBuf,
                                 unusedImage);
}
// Allocate a GstBuffer whose data pointer is the backing store of a fresh
// PlanarYCbCrImage, so decoded YUV lands directly in gfx memory (zero-copy).
// The new image is returned through aImage; the buffer keeps the image alive
// via an attached GstMozVideoBufferData.
GstFlowReturn GStreamerReader::AllocateVideoBufferFull(GstPad* aPad,
                                                       guint64 aOffset,
                                                       guint aSize,
                                                       GstCaps* aCaps,
                                                       GstBuffer** aBuf,
                                                       RefPtr<PlanarYCbCrImage>& aImage)
{
  /* allocate an image using the container */
  ImageContainer* container = mDecoder->GetImageContainer();
  if (container == nullptr) {
    return GST_FLOW_ERROR;
  }
  RefPtr<PlanarYCbCrImage> image = container->CreatePlanarYCbCrImage();

  /* prepare a GstBuffer pointing to the underlying PlanarYCbCrImage buffer */
  GstBuffer* buf = GST_BUFFER(gst_moz_video_buffer_new());
  GST_BUFFER_SIZE(buf) = aSize;
  /* allocate the actual YUV buffer */
  GST_BUFFER_DATA(buf) = image->AllocateAndGetNewBuffer(aSize);

  aImage = image;

  /* create a GstMozVideoBufferData to hold the image */
  GstMozVideoBufferData* bufferdata = new GstMozVideoBufferData(image);

  /* Attach bufferdata to our GstMozVideoBuffer, it will take care to free it */
  gst_moz_video_buffer_set_data(GST_MOZ_VIDEO_BUFFER(buf), bufferdata);

  *aBuf = buf;
  return GST_FLOW_OK;
}
// Pad probe on the app sinks' sink pads (GStreamer 0.10 signature). Records
// new-segment events so timestamps can later be mapped to stream time, and
// resets decode state when a flush (seek) completes.
gboolean GStreamerReader::EventProbe(GstPad* aPad, GstEvent* aEvent)
{
  GstElement* parent = GST_ELEMENT(gst_pad_get_parent(aPad));
  switch(GST_EVENT_TYPE(aEvent)) {
    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      gdouble rate;
      GstFormat format;
      gint64 start, stop, position;
      GstSegment* segment;

      /* Store the segments so we can convert timestamps to stream time, which
       * is what the upper layers sync on.
       */
      ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
      gst_event_parse_new_segment(aEvent, &update, &rate, &format,
                                  &start, &stop, &position);
      // Route the segment to whichever sink this pad belongs to.
      if (parent == GST_ELEMENT(mVideoAppSink))
        segment = &mVideoSegment;
      else
        segment = &mAudioSegment;
      gst_segment_set_newsegment(segment, update, rate, format,
                                 start, stop, position);
      break;
    }
    case GST_EVENT_FLUSH_STOP:
      /* Reset on seeks */
      ResetDecode();
      break;
    default:
      break;
  }
  // Balance the ref taken by gst_pad_get_parent().
  gst_object_unref(parent);
  // TRUE lets the event continue to flow downstream.
  return TRUE;
}
// Static trampoline for the event probe: aUserData carries the reader.
gboolean GStreamerReader::EventProbeCb(GstPad* aPad,
                                       GstEvent* aEvent,
                                       gpointer aUserData)
{
  GStreamerReader* self = reinterpret_cast<GStreamerReader*>(aUserData);
  return self->EventProbe(aPad, aEvent);
}
// If aBuffer is one of our image-backed GstMozVideoBuffers, recover the
// PlanarYCbCrImage behind it and (re)describe its Y/Cb/Cr planes from the
// current picture rect and video format. Returns nullptr for plain buffers.
RefPtr<PlanarYCbCrImage> GStreamerReader::GetImageFromBuffer(GstBuffer* aBuffer)
{
  if (!GST_IS_MOZ_VIDEO_BUFFER (aBuffer))
    return nullptr;

  RefPtr<PlanarYCbCrImage> image;
  GstMozVideoBufferData* bufferdata = reinterpret_cast<GstMozVideoBufferData*>(gst_moz_video_buffer_get_data(GST_MOZ_VIDEO_BUFFER(aBuffer)));
  image = bufferdata->mImage;

  PlanarYCbCrImage::Data data;
  data.mPicX = data.mPicY = 0;
  data.mPicSize = gfx::IntSize(mPicture.width, mPicture.height);
  data.mStereoMode = StereoMode::MONO;

  // Component 0 is Y; components 1 and 2 are Cb/Cr, which share a stride.
  data.mYChannel = GST_BUFFER_DATA(aBuffer);
  data.mYStride = gst_video_format_get_row_stride(mFormat, 0, mPicture.width);
  data.mYSize = gfx::IntSize(gst_video_format_get_component_width(mFormat, 0, mPicture.width),
                             gst_video_format_get_component_height(mFormat, 0, mPicture.height));
  data.mYSkip = 0;
  data.mCbCrStride = gst_video_format_get_row_stride(mFormat, 1, mPicture.width);
  data.mCbCrSize = gfx::IntSize(gst_video_format_get_component_width(mFormat, 1, mPicture.width),
                                gst_video_format_get_component_height(mFormat, 1, mPicture.height));
  data.mCbChannel = data.mYChannel + gst_video_format_get_component_offset(mFormat, 1,
                                                                           mPicture.width, mPicture.height);
  data.mCrChannel = data.mYChannel + gst_video_format_get_component_offset(mFormat, 2,
                                                                           mPicture.width, mPicture.height);
  data.mCbSkip = 0;
  data.mCrSkip = 0;

  // The image borrows the buffer's memory; no pixel copy happens here.
  image->SetDataNoCopy(data);

  return image;
}
// Copy a plain GstBuffer into a freshly-allocated image-backed buffer (used
// when the decoder handed us memory we don't own). On return, aOutBuffer is
// the new buffer and aImage its backing image.
void GStreamerReader::CopyIntoImageBuffer(GstBuffer* aBuffer,
                                          GstBuffer** aOutBuffer,
                                          RefPtr<PlanarYCbCrImage> &aImage)
{
  AllocateVideoBufferFull(nullptr, GST_BUFFER_OFFSET(aBuffer),
                          GST_BUFFER_SIZE(aBuffer), nullptr, aOutBuffer, aImage);

  gst_buffer_copy_metadata(*aOutBuffer, aBuffer, (GstBufferCopyFlags)GST_BUFFER_COPY_ALL);
  memcpy(GST_BUFFER_DATA(*aOutBuffer), GST_BUFFER_DATA(aBuffer), GST_BUFFER_SIZE(*aOutBuffer));
  // Re-derive the image description now that the pixels are in place.
  aImage = GetImageFromBuffer(*aOutBuffer);
}
// Build the raw-audio caps accepted by the audio sink: mono or stereo, host
// endianness, float32 or int16 depending on the build's sample type.
// Caller owns the returned caps reference.
GstCaps* GStreamerReader::BuildAudioSinkCaps()
{
  GstCaps* caps;
#if MOZ_LITTLE_ENDIAN
  int endianness = 1234;
#else
  int endianness = 4321;
#endif
  gint width;
#ifdef MOZ_SAMPLE_TYPE_FLOAT32
  caps = gst_caps_from_string("audio/x-raw-float, channels={1,2}");
  width = 32;
#else /* !MOZ_SAMPLE_TYPE_FLOAT32 */
  caps = gst_caps_from_string("audio/x-raw-int, channels={1,2}");
  width = 16;
#endif

  gst_caps_set_simple(caps,
                      "width", G_TYPE_INT, width,
                      "endianness", G_TYPE_INT, endianness,
                      nullptr);

  return caps;
}
// Hook the event probe on both app sinks' sink pads. The video pad also gets
// our bufferalloc function (for zero-copy allocation), with |this| stashed as
// the pad's element-private data so the static callback can find the reader.
void GStreamerReader::InstallPadCallbacks()
{
  GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink");

  gst_pad_add_event_probe(sinkpad,
                          G_CALLBACK(&GStreamerReader::EventProbeCb), this);

  gst_pad_set_bufferalloc_function(sinkpad, GStreamerReader::AllocateVideoBufferCb);
  gst_pad_set_element_private(sinkpad, this);
  // gst_element_get_static_pad returns a ref; drop it.
  gst_object_unref(sinkpad);

  sinkpad = gst_element_get_static_pad(GST_ELEMENT(mAudioAppSink), "sink");
  gst_pad_add_event_probe(sinkpad,
                          G_CALLBACK(&GStreamerReader::EventProbeCb), this);
  gst_object_unref(sinkpad);
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -1,263 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(GStreamerReader_h_)
#define GStreamerReader_h_
#include <map>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
// This include trips -Wreserved-user-defined-literal on clang. Ignoring it
// trips -Wpragmas on GCC (unknown warning), but ignoring that trips
// -Wunknown-pragmas on clang (unknown pragma).
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunknown-pragmas"
#pragma GCC diagnostic ignored "-Wpragmas"
#pragma GCC diagnostic ignored "-Wreserved-user-defined-literal"
#include <gst/video/video.h>
#pragma GCC diagnostic pop
#include "MediaDecoderReader.h"
#include "MediaResource.h"
#include "MP3FrameParser.h"
#include "ImageContainer.h"
#include "nsRect.h"
struct GstURIDecodeBin;
namespace mozilla {
class AbstractMediaDecoder;
// Media reader backed by a GStreamer (0.10 or 1.x) playbin pipeline: Gecko's
// MediaResource is fed into an appsrc, and decoded audio/video is pulled back
// out of two appsinks.
class GStreamerReader : public MediaDecoderReader
{
  typedef gfx::IntRect IntRect;

public:
  explicit GStreamerReader(AbstractMediaDecoder* aDecoder);
  virtual ~GStreamerReader();

  virtual nsresult Init() override;
  virtual RefPtr<ShutdownPromise> Shutdown() override;
  virtual nsresult ResetDecode() override;
  virtual bool DecodeAudioData() override;
  virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
                                int64_t aTimeThreshold) override;
  virtual nsresult ReadMetadata(MediaInfo* aInfo,
                                MetadataTags** aTags) override;
  virtual RefPtr<SeekPromise>
  Seek(int64_t aTime, int64_t aEndTime) override;
  virtual media::TimeIntervals GetBuffered() override;

protected:
  virtual void NotifyDataArrivedInternal() override;

public:
  layers::ImageContainer* GetImageContainer() { return mDecoder->GetImageContainer(); }

private:
  bool HasAudio() { return mInfo.HasAudio(); }
  bool HasVideo() { return mInfo.HasVideo(); }

  // Pull up to aLength bytes from mResource and push them into the appsrc.
  void ReadAndPushData(guint aLength);
  RefPtr<layers::PlanarYCbCrImage> GetImageFromBuffer(GstBuffer* aBuffer);
  void CopyIntoImageBuffer(GstBuffer *aBuffer, GstBuffer** aOutBuffer, RefPtr<layers::PlanarYCbCrImage> &image);
  GstCaps* BuildAudioSinkCaps();
  void InstallPadCallbacks();
#if GST_VERSION_MAJOR >= 1
  void ImageDataFromVideoFrame(GstVideoFrame *aFrame, layers::PlanarYCbCrImage::Data *aData);
#endif

  /* Called once the pipeline is setup to check that the stream only contains
   * supported formats
   */
  nsresult CheckSupportedFormats();

  /* Gst callbacks */

  static GstBusSyncReply ErrorCb(GstBus *aBus, GstMessage *aMessage, gpointer aUserData);
  GstBusSyncReply Error(GstBus *aBus, GstMessage *aMessage);

  /*
   * We attach this callback to playbin so that when uridecodebin is
   * constructed, we can then list for its autoplug-sort signal to block
   * list the elements it can construct.
   */
  static void ElementAddedCb(GstBin *aPlayBin,
                             GstElement *aElement,
                             gpointer aUserData);

  /*
   * Called on the autoplug-sort signal emitted by uridecodebin for filtering
   * the elements it uses.
   */
  static GValueArray *ElementFilterCb(GstURIDecodeBin *aBin,
                                      GstPad *aPad,
                                      GstCaps *aCaps,
                                      GValueArray *aFactories,
                                      gpointer aUserData);

  GValueArray *ElementFilter(GstURIDecodeBin *aBin,
                             GstPad *aPad,
                             GstCaps *aCaps,
                             GValueArray *aFactories);

  /* Called on the source-setup signal emitted by playbin. Used to
   * configure appsrc .
   */
  static void PlayBinSourceSetupCb(GstElement* aPlayBin,
                                   GParamSpec* pspec,
                                   gpointer aUserData);
  void PlayBinSourceSetup(GstAppSrc* aSource);

  /* Called from appsrc when we need to read more data from the resource */
  static void NeedDataCb(GstAppSrc* aSrc, guint aLength, gpointer aUserData);
  void NeedData(GstAppSrc* aSrc, guint aLength);

  /* Called when appsrc has enough data and we can stop reading */
  static void EnoughDataCb(GstAppSrc* aSrc, gpointer aUserData);
  void EnoughData(GstAppSrc* aSrc);

  /* Called when a seek is issued on the pipeline */
  static gboolean SeekDataCb(GstAppSrc* aSrc,
                             guint64 aOffset,
                             gpointer aUserData);
  gboolean SeekData(GstAppSrc* aSrc, guint64 aOffset);

  /* Called when events reach the sinks. See inline comments */
#if GST_VERSION_MAJOR == 1
  static GstPadProbeReturn EventProbeCb(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData);
  GstPadProbeReturn EventProbe(GstPad *aPad, GstEvent *aEvent);
#else
  static gboolean EventProbeCb(GstPad* aPad, GstEvent* aEvent, gpointer aUserData);
  gboolean EventProbe(GstPad* aPad, GstEvent* aEvent);
#endif

  /* Called when the video part of the pipeline allocates buffers. Used to
   * provide PlanarYCbCrImage backed GstBuffers to the pipeline so that a memory
   * copy can be avoided when handling YUV buffers from the pipeline to the gfx
   * side.
   */
#if GST_VERSION_MAJOR == 1
  static GstPadProbeReturn QueryProbeCb(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData);
  GstPadProbeReturn QueryProbe(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData);
#else
  static GstFlowReturn AllocateVideoBufferCb(GstPad* aPad, guint64 aOffset, guint aSize,
                                             GstCaps* aCaps, GstBuffer** aBuf);
  GstFlowReturn AllocateVideoBufferFull(GstPad* aPad, guint64 aOffset, guint aSize,
                                        GstCaps* aCaps, GstBuffer** aBuf, RefPtr<layers::PlanarYCbCrImage>& aImage);
  GstFlowReturn AllocateVideoBuffer(GstPad* aPad, guint64 aOffset, guint aSize,
                                    GstCaps* aCaps, GstBuffer** aBuf);
#endif

  /* Called when the pipeline is prerolled, that is when at start or after a
   * seek, the first audio and video buffers are queued in the sinks.
   */
  static GstFlowReturn NewPrerollCb(GstAppSink* aSink, gpointer aUserData);
  void VideoPreroll();
  void AudioPreroll();

  /* Called when buffers reach the sinks */
  static GstFlowReturn NewBufferCb(GstAppSink* aSink, gpointer aUserData);
  void NewVideoBuffer();
  void NewAudioBuffer();

  /* Called at end of stream, when decoding has finished */
  static void EosCb(GstAppSink* aSink, gpointer aUserData);
  /* Notifies that a sink will no longer receive any more data. If nullptr
   * is passed to this, we'll assume all streams have reached EOS (for example
   * an error has occurred). */
  void Eos(GstAppSink* aSink = nullptr);

  /* Called when an element is added inside playbin. We use it to find the
   * decodebin instance.
   */
  static void PlayElementAddedCb(GstBin *aBin, GstElement *aElement,
                                 gpointer *aUserData);

  /* Called during decoding, to decide whether a (sub)stream should be decoded or
   * ignored */
  static bool ShouldAutoplugFactory(GstElementFactory* aFactory, GstCaps* aCaps);

  /* Called by decodebin during autoplugging. We use it to apply our
   * container/codec block list.
   */
  static GValueArray* AutoplugSortCb(GstElement* aElement,
                                     GstPad* aPad, GstCaps* aCaps,
                                     GValueArray* aFactories);

  // Try to find MP3 headers in this stream using our MP3 frame parser.
  nsresult ParseMP3Headers();

  // Get the length of the stream, excluding any metadata we have ignored at the
  // start of the stream: ID3 headers, for example.
  int64_t GetDataLength();

  // Use our own MP3 parser here, largely for consistency with other platforms.
  MP3FrameParser mMP3FrameParser;

  // The byte position in the stream where the actual media (ignoring, for
  // example, ID3 tags) starts.
  uint64_t mDataOffset;

  // We want to be able to decide in |ReadMetadata| whether or not we use the
  // duration from the MP3 frame parser, as this backend supports more than just
  // MP3. But |NotifyDataArrived| can update the duration and is often called
  // _before_ |ReadMetadata|. This flag stops the former from using the parser
  // duration until we are sure we want to.
  bool mUseParserDuration;
  int64_t mLastParserDuration;

#if GST_VERSION_MAJOR >= 1
  GstAllocator *mAllocator;
  GstBufferPool *mBufferPool;
  GstVideoInfo mVideoInfo;
#endif
  GstElement* mPlayBin;
  GstBus* mBus;
  GstAppSrc* mSource;
  /* video sink bin */
  GstElement* mVideoSink;
  /* the actual video app sink */
  GstAppSink* mVideoAppSink;
  /* audio sink bin */
  GstElement* mAudioSink;
  /* the actual audio app sink */
  GstAppSink* mAudioAppSink;
  GstVideoFormat mFormat;
  // Displayed picture rect within the decoded frame.
  IntRect mPicture;
  int mVideoSinkBufferCount;
  int mAudioSinkBufferCount;
  GstAppSrcCallbacks mSrcCallbacks;
  GstAppSinkCallbacks mSinkCallbacks;

  /* monitor used to synchronize access to shared state between gstreamer
   * threads and other gecko threads */
  ReentrantMonitor mGstThreadsMonitor;

  /* video and audio segments we use to convert absolute timestamps to [0,
   * stream_duration]. They're set when the pipeline is started or after a seek.
   * Concurrent access guarded with mGstThreadsMonitor.
   */
  GstSegment mVideoSegment;
  GstSegment mAudioSegment;

  /* bool used to signal when gst has detected the end of stream and
   * DecodeAudioData and DecodeVideoFrame should not expect any more data
   */
  bool mReachedAudioEos;
  bool mReachedVideoEos;
#if GST_VERSION_MAJOR >= 1
  bool mConfigureAlignment;
#endif
  // Frame rate as a fraction (numerator/denominator).
  int fpsNum;
  int fpsDen;

  MediaResourceIndex mResource;
  MediaByteRangeSet mLastCachedRanges;
};
} // namespace mozilla
#endif

Просмотреть файл

@ -1,38 +0,0 @@
# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
# vim: set filetype=python:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Headers exported to the rest of the tree.
EXPORTS += [
    'GStreamerDecoder.h',
    'GStreamerFormatHelper.h',
    'GStreamerLoader.h',
    'GStreamerReader.h',
]

SOURCES += [
    'GStreamerDecoder.cpp',
    'GStreamerFormatHelper.cpp',
    'GStreamerLoader.cpp',
    'GStreamerReader.cpp',
]

# GStreamer 1.0 uses the GstAllocator-based path; 0.10 needs the custom
# GstMozVideoBuffer mini-object and the 0.10-specific reader.
if CONFIG['GST_API_VERSION'] == '1.0':
    SOURCES += [
        'GStreamerAllocator.cpp',
    ]
else:
    SOURCES += [
        'GStreamerMozVideoBuffer.cpp',
        'GStreamerReader-0.10.cpp',
    ]

FINAL_LIBRARY = 'xul'

LOCAL_INCLUDES += [
    '/dom/base',
    '/dom/html',
]

# pkg-config flags for GStreamer, applied to both C and C++ sources.
CFLAGS += CONFIG['GSTREAMER_CFLAGS']
CXXFLAGS += CONFIG['GSTREAMER_CFLAGS']

Просмотреть файл

@ -42,9 +42,6 @@ DIRS += [
if CONFIG['MOZ_RAW']:
DIRS += ['raw']
if CONFIG['MOZ_GSTREAMER']:
DIRS += ['gstreamer']
if CONFIG['MOZ_DIRECTSHOW']:
DIRS += ['directshow']
@ -312,9 +309,6 @@ if CONFIG['ANDROID_VERSION'] > '15':
if CONFIG['MOZ_GONK_MEDIACODEC']:
DEFINES['MOZ_GONK_MEDIACODEC'] = True
CFLAGS += CONFIG['GSTREAMER_CFLAGS']
CXXFLAGS += CONFIG['GSTREAMER_CFLAGS']
include('/ipc/chromium/chromium-config.mozbuild')
# Suppress some GCC warnings being treated as errors:

Просмотреть файл

@ -110,44 +110,6 @@ public:
return MediaCodecDataDecoder::Input(aSample);
}
// Whether decoded frames should be copied into an EGLImage instead of being
// used directly from the SurfaceTexture.
bool WantCopy() const
{
  // Allocating a texture is incredibly slow on PowerVR and may fail on
  // emulators, see bug 1190379.
  return mGLContext->Vendor() != GLVendor::Imagination &&
         mGLContext->Renderer() != GLRenderer::AndroidEmulator;
}
// Blit |img| into a temporary GL texture and wrap that texture in a new
// EGLImage (IMAGE_PRESERVED keeps the pixels after the texture is deleted).
// Returns nullptr if the blit fails; the temporary texture is deleted on
// every path.
EGLImage CopySurface(layers::Image* img)
{
  mGLContext->MakeCurrent();
  GLuint tex = CreateTextureForOffscreen(mGLContext, mGLContext->GetGLFormats(),
                                         img->GetSize());

  auto helper = mGLContext->BlitHelper();
  const gl::OriginPos destOrigin = gl::OriginPos::TopLeft;
  if (!helper->BlitImageToTexture(img, img->GetSize(), tex,
                                  LOCAL_GL_TEXTURE_2D, destOrigin))
  {
    mGLContext->fDeleteTextures(1, &tex);
    return nullptr;
  }

  EGLint attribs[] = {
    LOCAL_EGL_IMAGE_PRESERVED_KHR, LOCAL_EGL_TRUE,
    LOCAL_EGL_NONE, LOCAL_EGL_NONE
  };

  EGLContext eglContext = static_cast<GLContextEGL*>(mGLContext.get())->mContext;
  EGLImage eglImage = sEGLLibrary.fCreateImage(
    EGL_DISPLAY(), eglContext, LOCAL_EGL_GL_TEXTURE_2D_KHR,
    reinterpret_cast<EGLClientBuffer>(tex), attribs);
  mGLContext->fDeleteTextures(1, &tex);

  return eglImage;
}
nsresult PostOutput(BufferInfo::Param aInfo, MediaFormat::Param aFormat,
const TimeUnit& aDuration) override
{
@ -159,30 +121,6 @@ public:
new SurfaceTextureImage(mSurfaceTexture.get(), mConfig.mDisplay,
gl::OriginPos::BottomLeft);
if (WantCopy()) {
EGLImage eglImage = CopySurface(img);
if (!eglImage) {
return NS_ERROR_FAILURE;
}
EGLSync eglSync = nullptr;
if (sEGLLibrary.IsExtensionSupported(GLLibraryEGL::KHR_fence_sync) &&
mGLContext->IsExtensionSupported(GLContext::OES_EGL_sync))
{
MOZ_ASSERT(mGLContext->IsCurrent());
eglSync = sEGLLibrary.fCreateSync(EGL_DISPLAY(),
LOCAL_EGL_SYNC_FENCE,
nullptr);
MOZ_ASSERT(eglSync);
mGLContext->fFlush();
} else {
NS_WARNING("No EGL fence support detected, rendering artifacts may occur!");
}
img = new layers::EGLImageImage(eglImage, eglSync, mConfig.mDisplay,
gl::OriginPos::TopLeft, true /* owns */);
}
nsresult rv;
int32_t flags;
NS_ENSURE_SUCCESS(rv = aInfo->Flags(&flags), rv);

Просмотреть файл

@ -128,7 +128,6 @@ var haveMp4 = (getPref("media.wmf.enabled") && IsWindowsVistaOrLater()) ||
IsMacOSSnowLeopardOrLater() ||
IsJellyBeanOrLater() ||
getPref("media.omx.enabled") ||
getPref("media.gstreamer.enabled") ||
getPref("media.ffmpeg.enabled");
check_mp4(document.getElementById('v'), haveMp4);
@ -136,7 +135,6 @@ check_mp4(document.getElementById('v'), haveMp4);
var haveMp3 = getPref("media.directshow.enabled") ||
(getPref("media.wmf.enabled") && IsWindowsVistaOrLater()) ||
(IsJellyBeanOrLater() && getPref("media.android-media-codec.enabled")) ||
getPref("media.gstreamer.enabled") ||
getPref("media.ffmpeg.enabled") ||
getPref("media.apple.mp3.enabled");
check_mp3(document.getElementById('v'), haveMp3);

Просмотреть файл

@ -1,472 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "WebMReader.h"
#ifdef MOZ_TREMOR
#include "tremor/ivorbiscodec.h"
#else
#include "vorbis/codec.h"
#endif
#include "OpusParser.h"
#include "VorbisUtils.h"
#include "OggReader.h"
#undef LOG
#ifdef PR_LOGGING
#include "prprf.h"
#define LOG(type, msg) MOZ_LOG(gMediaDecoderLog, type, msg)
#else
#define LOG(type, msg)
#endif
namespace mozilla {
extern LazyLogModule gMediaDecoderLog;
// Build a libogg packet header around borrowed data. The packet does not own
// aData; the caller must keep the buffer alive while the packet is in use.
ogg_packet InitOggPacket(const unsigned char* aData, size_t aLength,
                         bool aBOS, bool aEOS,
                         int64_t aGranulepos, int64_t aPacketNo)
{
  ogg_packet pkt;
  pkt.packet = const_cast<unsigned char*>(aData);
  pkt.bytes = aLength;
  pkt.granulepos = aGranulepos;
  pkt.packetno = aPacketNo;
  pkt.b_o_s = aBOS;
  pkt.e_o_s = aEOS;
  return pkt;
}
// WebM audio-track decoder backed by libvorbis (Tremor in MOZ_TREMOR builds).
class VorbisDecoder : public WebMAudioDecoder
{
public:
  nsresult Init() override;
  void Shutdown() override;
  nsresult ResetDecode() override;
  nsresult DecodeHeader(const unsigned char* aData, size_t aLength) override;
  nsresult FinishInit(AudioInfo& aInfo) override;
  bool Decode(const unsigned char* aData, size_t aLength,
              int64_t aOffset, uint64_t aTstampUsecs,
              int64_t aDiscardPadding, int32_t* aTotalFrames) override;
  explicit VorbisDecoder(WebMReader* aReader);
  ~VorbisDecoder();
private:
  RefPtr<WebMReader> mReader;

  // Vorbis decoder state
  vorbis_info mVorbisInfo;
  vorbis_comment mVorbisComment;
  vorbis_dsp_state mVorbisDsp;
  vorbis_block mVorbisBlock;
  // Packets seen so far; the first three are the Vorbis header packets.
  int64_t mPacketCount;
};
VorbisDecoder::VorbisDecoder(WebMReader* aReader)
  : mReader(aReader)
  , mPacketCount(0)
{
  // Zero these member vars to avoid crashes in Vorbis clear functions when
  // destructor is called before |Init|.
  PodZero(&mVorbisBlock);
  PodZero(&mVorbisDsp);
  PodZero(&mVorbisInfo);
  PodZero(&mVorbisComment);
}
VorbisDecoder::~VorbisDecoder()
{
  // Safe even if Init() never ran: the ctor PodZero'd all of this state.
  vorbis_block_clear(&mVorbisBlock);
  vorbis_dsp_clear(&mVorbisDsp);
  vorbis_info_clear(&mVorbisInfo);
  vorbis_comment_clear(&mVorbisComment);
}
void
VorbisDecoder::Shutdown()
{
  // Drop the back-reference to the reader.
  mReader = nullptr;
}
// Prepare the header-parsing structures; the dsp/block state is only set up
// later in FinishInit once all three headers have been seen.
nsresult
VorbisDecoder::Init()
{
  vorbis_info_init(&mVorbisInfo);
  vorbis_comment_init(&mVorbisComment);
  PodZero(&mVorbisDsp);
  PodZero(&mVorbisBlock);
  return NS_OK;
}
nsresult
VorbisDecoder::ResetDecode()
{
  // Ignore failed results from vorbis_synthesis_restart. They
  // aren't fatal and it fails when ResetDecode is called at a
  // time when no vorbis data has been read.
  vorbis_synthesis_restart(&mVorbisDsp);
  return NS_OK;
}
// Feed one of the three Vorbis header packets (identification, comment,
// setup) to libvorbis; only the first is flagged beginning-of-stream.
nsresult
VorbisDecoder::DecodeHeader(const unsigned char* aData, size_t aLength)
{
  bool bos = mPacketCount == 0;
  ogg_packet pkt = InitOggPacket(aData, aLength, bos, false, 0, mPacketCount++);
  MOZ_ASSERT(mPacketCount <= 3);

  int r = vorbis_synthesis_headerin(&mVorbisInfo,
                                    &mVorbisComment,
                                    &pkt);
  return r == 0 ? NS_OK : NS_ERROR_FAILURE;
}
// Called after all three headers have been decoded: set up the synthesis
// state and report rate/channels to the caller's AudioInfo.
nsresult
VorbisDecoder::FinishInit(AudioInfo& aInfo)
{
  MOZ_ASSERT(mPacketCount == 3);

  int r = vorbis_synthesis_init(&mVorbisDsp, &mVorbisInfo);
  if (r) {
    return NS_ERROR_FAILURE;
  }

  r = vorbis_block_init(&mVorbisDsp, &mVorbisBlock);
  if (r) {
    return NS_ERROR_FAILURE;
  }

  aInfo.mRate = mVorbisDsp.vi->rate;
  aInfo.mChannels = mVorbisDsp.vi->channels;

  return NS_OK;
}
// Decode one Vorbis data packet and push the resulting (interleaved) PCM to
// the reader's audio queue. aTotalFrames accumulates frames decoded for this
// timestamp so each AudioData gets a monotonically advancing time. Returns
// false on any decode or overflow error.
bool
VorbisDecoder::Decode(const unsigned char* aData, size_t aLength,
                      int64_t aOffset, uint64_t aTstampUsecs,
                      int64_t aDiscardPadding, int32_t* aTotalFrames)
{
  MOZ_ASSERT(mPacketCount >= 3);
  ogg_packet pkt = InitOggPacket(aData, aLength, false, false, -1, mPacketCount++);
  bool first_packet = mPacketCount == 4;

  if (vorbis_synthesis(&mVorbisBlock, &pkt)) {
    return false;
  }

  if (vorbis_synthesis_blockin(&mVorbisDsp,
                               &mVorbisBlock)) {
    return false;
  }

  VorbisPCMValue** pcm = 0;
  int32_t frames = vorbis_synthesis_pcmout(&mVorbisDsp, &pcm);
  // If the first packet of audio in the media produces no data, we
  // still need to produce an AudioData for it so that the correct media
  // start time is calculated. Otherwise we'd end up with a media start
  // time derived from the timecode of the first packet that produced
  // data.
  if (frames == 0 && first_packet) {
    mReader->AudioQueue().Push(new AudioData(aOffset, aTstampUsecs, 0, 0, nullptr,
                                             mVorbisDsp.vi->channels,
                                             mVorbisDsp.vi->rate));
  }
  while (frames > 0) {
    uint32_t channels = mVorbisDsp.vi->channels;
    auto buffer = MakeUnique<AudioDataValue[]>(frames*channels);
    // Interleave libvorbis' per-channel planar output.
    for (uint32_t j = 0; j < channels; ++j) {
      VorbisPCMValue* channel = pcm[j];
      for (uint32_t i = 0; i < uint32_t(frames); ++i) {
        buffer[i*channels + j] = MOZ_CONVERT_VORBIS_SAMPLE(channel[i]);
      }
    }

    CheckedInt64 duration = FramesToUsecs(frames, mVorbisDsp.vi->rate);
    if (!duration.isValid()) {
      NS_WARNING("Int overflow converting WebM audio duration");
      return false;
    }
    CheckedInt64 total_duration = FramesToUsecs(*aTotalFrames,
                                                mVorbisDsp.vi->rate);
    if (!total_duration.isValid()) {
      NS_WARNING("Int overflow converting WebM audio total_duration");
      return false;
    }

    CheckedInt64 time = total_duration + aTstampUsecs;
    if (!time.isValid()) {
      NS_WARNING("Int overflow adding total_duration and aTstampUsecs");
      return false;
    };

    *aTotalFrames += frames;

    mReader->AudioQueue().Push(new AudioData(aOffset,
                                             time.value(),
                                             duration.value(),
                                             frames,
                                             Move(buffer),
                                             mVorbisDsp.vi->channels,
                                             mVorbisDsp.vi->rate));
    // Tell libvorbis we consumed the frames, then check for more output.
    if (vorbis_synthesis_read(&mVorbisDsp, frames)) {
      return false;
    }

    frames = vorbis_synthesis_pcmout(&mVorbisDsp, &pcm);
  }

  return true;
}
// ------------------------------------------------------------------------
// WebM audio-track decoder backed by libopus (multistream API).
class OpusDecoder : public WebMAudioDecoder
{
public:
  nsresult Init() override;
  void Shutdown() override;
  nsresult ResetDecode() override;
  nsresult DecodeHeader(const unsigned char* aData, size_t aLength) override;
  nsresult FinishInit(AudioInfo& aInfo) override;
  bool Decode(const unsigned char* aData, size_t aLength,
              int64_t aOffset, uint64_t aTstampUsecs,
              int64_t aDiscardPadding, int32_t* aTotalFrames) override;
  explicit OpusDecoder(WebMReader* aReader);
  ~OpusDecoder();
private:
  RefPtr<WebMReader> mReader;

  // Opus decoder state
  nsAutoPtr<OpusParser> mOpusParser;
  OpusMSDecoder* mOpusDecoder;

  uint16_t mSkip;        // Samples left to trim before playback.
  bool mDecodedHeader;   // True once the OpusHead packet has been consumed.

  // Opus padding should only be discarded on the final packet. Once this
  // is set to true, if the reader attempts to decode any further packets it
  // will raise an error so we can indicate that the file is invalid.
  bool mPaddingDiscarded;
};
// Constructs with all decode state cleared; the libopus decoder itself is
// not created until FinishInit().
OpusDecoder::OpusDecoder(WebMReader* aReader)
  : mReader(aReader)
  , mOpusDecoder(nullptr)
  , mSkip(0)
  , mDecodedHeader(false)
  , mPaddingDiscarded(false)
{
}
// Releases the libopus multistream decoder, if FinishInit() ever created one.
OpusDecoder::~OpusDecoder()
{
  if (!mOpusDecoder) {
    return;
  }
  opus_multistream_decoder_destroy(mOpusDecoder);
  mOpusDecoder = nullptr;
}
void
OpusDecoder::Shutdown()
{
  // Drop the reference to the reader; the opus decoder itself is torn
  // down in the destructor.
  mReader = nullptr;
}
nsresult
OpusDecoder::Init()
{
  // Nothing to do up front; real setup happens in DecodeHeader() and
  // FinishInit().
  return NS_OK;
}
// Discards all decoder history so decoding can restart cleanly after a
// seek: resets libopus state, re-arms the pre-skip trim and clears the
// padding-discard latch.
nsresult
OpusDecoder::ResetDecode()
{
  if (!mOpusDecoder) {
    return NS_OK;
  }
  opus_multistream_decoder_ctl(mOpusDecoder, OPUS_RESET_STATE);
  mSkip = mOpusParser->mPreSkip;
  mPaddingDiscarded = false;
  return NS_OK;
}
// Parses one codec-private header blob into mOpusParser. Must run exactly
// once, before the decoder exists.
nsresult
OpusDecoder::DecodeHeader(const unsigned char* aData, size_t aLength)
{
  MOZ_ASSERT(!mOpusParser);
  MOZ_ASSERT(!mOpusDecoder);
  MOZ_ASSERT(!mDecodedHeader);
  mDecodedHeader = true;

  mOpusParser = new OpusParser;
  // OpusParser takes a non-const pointer but we only hold const data.
  unsigned char* headerData = const_cast<unsigned char*>(aData);
  bool parsed = mOpusParser->DecodeHeader(headerData, aLength);
  return parsed ? NS_OK : NS_ERROR_FAILURE;
}
// Creates the libopus multistream decoder from the parsed header, validates
// the container's codec delay against the header pre-skip, and publishes
// rate/channels into aInfo. Returns NS_ERROR_FAILURE if decoder creation
// fails or the header is inconsistent.
nsresult
OpusDecoder::FinishInit(AudioInfo& aInfo)
{
  MOZ_ASSERT(mDecodedHeader);

  int r;
  mOpusDecoder = opus_multistream_decoder_create(mOpusParser->mRate,
                                                 mOpusParser->mChannels,
                                                 mOpusParser->mStreams,
                                                 mOpusParser->mCoupledStreams,
                                                 mOpusParser->mMappingTable,
                                                 &r);
  // Fail fast: previously r was only inspected at the very end, after the
  // header validation below had run and aInfo had been mutated. There is no
  // point validating against a decoder that failed to initialize.
  if (r != OPUS_OK) {
    return NS_ERROR_FAILURE;
  }

  mSkip = mOpusParser->mPreSkip;
  mPaddingDiscarded = false;

  // The container-level codec delay must agree with the Opus pre-skip.
  if (int64_t(mReader->GetCodecDelay()) != FramesToUsecs(mOpusParser->mPreSkip,
                                                         mOpusParser->mRate).value()) {
    LOG(LogLevel::Warning,
        ("Invalid Opus header: CodecDelay and pre-skip do not match!"));
    return NS_ERROR_FAILURE;
  }

  aInfo.mRate = mOpusParser->mRate;
  aInfo.mChannels = mOpusParser->mChannels;

  return NS_OK;
}
// Decodes one demuxed Opus packet and pushes the resulting AudioData onto
// the reader's audio queue. Handles pre-skip trimming, end-of-stream
// discard padding (aDiscardPadding, applied to the packet tail) and the
// header output gain. Returns false on malformed packets or arithmetic
// overflow. Note: aTotalFrames is part of the WebMAudioDecoder interface
// but is not updated here.
bool
OpusDecoder::Decode(const unsigned char* aData, size_t aLength,
                    int64_t aOffset, uint64_t aTstampUsecs,
                    int64_t aDiscardPadding, int32_t* aTotalFrames)
{
  uint32_t channels = mOpusParser->mChannels;
  // No channel mapping for more than 8 channels.
  if (channels > 8) {
    return false;
  }

  if (mPaddingDiscarded) {
    // Discard padding should be used only on the final packet, so
    // decoding after a padding discard is invalid.
    LOG(LogLevel::Debug, ("Opus error, discard padding on interstitial packet"));
    return false;
  }

  // Maximum value is 63*2880, so there's no chance of overflow.
  int32_t frames_number = opus_packet_get_nb_frames(aData, aLength);
  if (frames_number <= 0) {
    return false; // Invalid packet header.
  }

  int32_t samples =
    opus_packet_get_samples_per_frame(aData, opus_int32(mOpusParser->mRate));

  // A valid Opus packet must be between 2.5 and 120 ms long (48kHz).
  int32_t frames = frames_number*samples;
  if (frames < 120 || frames > 5760)
    return false;

  auto buffer = MakeUnique<AudioDataValue[]>(frames * channels);

  // Decode to the appropriate sample type.
#ifdef MOZ_SAMPLE_TYPE_FLOAT32
  int ret = opus_multistream_decode_float(mOpusDecoder,
                                          aData, aLength,
                                          buffer.get(), frames, false);
#else
  int ret = opus_multistream_decode(mOpusDecoder,
                                    aData, aLength,
                                    buffer.get(), frames, false);
#endif
  if (ret < 0)
    return false;
  NS_ASSERTION(ret == frames, "Opus decoded too few audio samples");
  CheckedInt64 startTime = aTstampUsecs;

  // Trim the initial frames while the decoder is settling.
  if (mSkip > 0) {
    int32_t skipFrames = std::min<int32_t>(mSkip, frames);
    int32_t keepFrames = frames - skipFrames;
    LOG(LogLevel::Debug, ("Opus decoder skipping %d of %d frames",
                          skipFrames, frames));
    // Shift the kept samples to the front of the buffer.
    PodMove(buffer.get(),
            buffer.get() + skipFrames * channels,
            keepFrames * channels);
    // Start time advances past the trimmed samples.
    startTime = startTime + FramesToUsecs(skipFrames, mOpusParser->mRate);
    frames = keepFrames;
    mSkip -= skipFrames;
  }

  if (aDiscardPadding < 0) {
    // Negative discard padding is invalid.
    LOG(LogLevel::Debug, ("Opus error, negative discard padding"));
    return false;
  }
  if (aDiscardPadding > 0) {
    // aDiscardPadding is in nanoseconds; convert to a frame count.
    CheckedInt64 discardFrames = UsecsToFrames(aDiscardPadding / NS_PER_USEC,
                                               mOpusParser->mRate);
    if (!discardFrames.isValid()) {
      NS_WARNING("Int overflow in DiscardPadding");
      return false;
    }
    if (discardFrames.value() > frames) {
      // Discarding more than the entire packet is invalid.
      LOG(LogLevel::Debug, ("Opus error, discard padding larger than packet"));
      return false;
    }
    LOG(LogLevel::Debug, ("Opus decoder discarding %d of %d frames",
                          int32_t(discardFrames.value()), frames));
    // Padding discard is only supposed to happen on the final packet.
    // Record the discard so we can return an error if another packet is
    // decoded.
    mPaddingDiscarded = true;
    // Trimming from the tail needs no data move, just a shorter count.
    int32_t keepFrames = frames - discardFrames.value();
    frames = keepFrames;
  }

  // Apply the header gain if one was specified.
#ifdef MOZ_SAMPLE_TYPE_FLOAT32
  if (mOpusParser->mGain != 1.0f) {
    float gain = mOpusParser->mGain;
    int samples = frames * channels;
    for (int i = 0; i < samples; i++) {
      buffer[i] *= gain;
    }
  }
#else
  if (mOpusParser->mGain_Q16 != 65536) {
    // Fixed-point (Q16) gain with rounding, clipped to 16-bit range.
    int64_t gain_Q16 = mOpusParser->mGain_Q16;
    int samples = frames * channels;
    for (int i = 0; i < samples; i++) {
      int32_t val = static_cast<int32_t>((gain_Q16*buffer[i] + 32768)>>16);
      buffer[i] = static_cast<AudioDataValue>(MOZ_CLIP_TO_15(val));
    }
  }
#endif

  CheckedInt64 duration = FramesToUsecs(frames, mOpusParser->mRate);
  if (!duration.isValid()) {
    NS_WARNING("Int overflow converting WebM audio duration");
    return false;
  }
  // Shift the timeline back by the codec delay so output starts at zero.
  CheckedInt64 time = startTime - mReader->GetCodecDelay();
  if (!time.isValid()) {
    NS_WARNING("Int overflow shifting tstamp by codec delay");
    return false;
  };

  mReader->AudioQueue().Push(new AudioData(aOffset,
                                           time.value(),
                                           duration.value(),
                                           frames,
                                           Move(buffer),
                                           mOpusParser->mChannels,
                                           mOpusParser->mRate));
  return true;
}
} // namespace mozilla

Просмотреть файл

@ -4,6 +4,7 @@
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "EbmlComposer.h"
#include "mozilla/UniquePtr.h"
#include "libmkv/EbmlIDs.h"
#include "libmkv/EbmlWriter.h"
#include "libmkv/WebMElement.h"
@ -22,7 +23,7 @@ void EbmlComposer::GenerateHeader()
EbmlGlobal ebml;
// The WEbM header default size usually smaller than 1k.
auto buffer = MakeUnique<uint8_t[]>(DEFAULT_HEADER_SIZE +
mCodecPrivateData.Length());
mCodecPrivateData.Length());
ebml.buf = buffer.get();
ebml.offset = 0;
writeHeader(&ebml);

Просмотреть файл

@ -1,233 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "SoftwareWebMVideoDecoder.h"
#include "AbstractMediaDecoder.h"
#include "gfx2DGlue.h"
#include "MediaDecoderStateMachine.h"
#include "MediaResource.h"
#include "nsError.h"
#include "OggReader.h"
#include "TimeUnits.h"
#include "VorbisUtils.h"
#include "WebMBufferedParser.h"
#include "NesteggPacketHolder.h"
#include <algorithm>
#define VPX_DONT_DEFINE_STDINT_TYPES
#include "vpx/vp8dx.h"
#include "vpx/vpx_decoder.h"
namespace mozilla {
using namespace gfx;
using namespace layers;
SoftwareWebMVideoDecoder::SoftwareWebMVideoDecoder(WebMReader* aReader)
  : WebMVideoDecoder(),
    mReader(aReader)
{
  MOZ_COUNT_CTOR(SoftwareWebMVideoDecoder);
  // Zero the libvpx context; it is initialized for real in InitDecoder().
  PodZero(&mVPX);
}
SoftwareWebMVideoDecoder::~SoftwareWebMVideoDecoder()
{
  // The libvpx context is torn down in Shutdown(), not here.
  MOZ_COUNT_DTOR(SoftwareWebMVideoDecoder);
}
void
SoftwareWebMVideoDecoder::Shutdown()
{
  // Free the libvpx decoder context and drop the reader reference.
  vpx_codec_destroy(&mVPX);
  mReader = nullptr;
}
/* static */
WebMVideoDecoder*
SoftwareWebMVideoDecoder::Create(WebMReader* aReader)
{
  // Factory used by WebMReader; the caller assumes ownership of the
  // returned decoder.
  return new SoftwareWebMVideoDecoder(aReader);
}
nsresult
SoftwareWebMVideoDecoder::Init(unsigned int aWidth, unsigned int aHeight)
{
  // Thin wrapper over the codec-specific initialization.
  return InitDecoder(aWidth, aHeight);
}
// Initializes the libvpx decoder context for the track's codec (VP8 or
// VP9). Fails for unknown codec ids or if libvpx refuses to initialize.
nsresult
SoftwareWebMVideoDecoder::InitDecoder(unsigned int aWidth, unsigned int aHeight)
{
  // Pick the libvpx interface that matches the container's codec id.
  vpx_codec_iface_t* iface = nullptr;
  int codec = mReader->GetVideoCodec();
  if (codec == NESTEGG_CODEC_VP8) {
    iface = vpx_codec_vp8_dx();
  } else if (codec == NESTEGG_CODEC_VP9) {
    iface = vpx_codec_vp9_dx();
  }

  if (!iface) {
    return NS_ERROR_FAILURE;
  }
  if (vpx_codec_dec_init(&mVPX, iface, nullptr, 0)) {
    return NS_ERROR_FAILURE;
  }
  return NS_OK;
}
// Decodes the next demuxed video packet with libvpx and pushes the
// resulting frame(s) onto the reader's video queue. Returns false on
// demuxer/decoder error; returns true without queueing when the frame is
// skipped or dropped (stats are updated via AutoNotifyDecoded either way).
bool
SoftwareWebMVideoDecoder::DecodeVideoFrame(bool &aKeyframeSkip,
                                           int64_t aTimeThreshold)
{
  MOZ_ASSERT(mReader->OnTaskQueue());

  // Record number of frames decoded and parsed. Automatically update the
  // stats counters using the AutoNotifyDecoded stack-based class.
  AbstractMediaDecoder::AutoNotifyDecoded a(mReader->GetDecoder());

  RefPtr<NesteggPacketHolder> holder(mReader->NextPacket(WebMReader::VIDEO));
  if (!holder) {
    return false;
  }

  nestegg_packet* packet = holder->Packet();
  unsigned int track = 0;
  int r = nestegg_packet_track(packet, &track);
  if (r == -1) {
    return false;
  }

  unsigned int count = 0;
  r = nestegg_packet_count(packet, &count);
  if (r == -1) {
    return false;
  }

  if (count > 1) {
    NS_WARNING("Packet contains more than one video frame");
    return false;
  }

  int64_t tstamp = holder->Timestamp();

  // The end time of this frame is the start time of the next frame. Fetch
  // the timestamp of the next packet for this track. If we've reached the
  // end of the resource, use the file's duration as the end time of this
  // video frame.
  int64_t next_tstamp = 0;
  RefPtr<NesteggPacketHolder> next_holder(mReader->NextPacket(WebMReader::VIDEO));
  if (next_holder) {
    next_tstamp = next_holder->Timestamp();
    // Put the peeked packet back so it is decoded on the next call.
    mReader->PushVideoPacket(next_holder);
  } else {
    // No next packet: extrapolate using the previous frame interval.
    next_tstamp = tstamp;
    next_tstamp += tstamp - mReader->GetLastVideoFrameTime();
  }
  mReader->SetLastVideoFrameTime(tstamp);

  unsigned char* data;
  size_t length;
  r = nestegg_packet_data(packet, 0, &data, &length);
  if (r == -1) {
    return false;
  }

  // Peek at the stream info (notably si.is_kf) without a full decode.
  vpx_codec_stream_info_t si;
  PodZero(&si);
  si.sz = sizeof(si);
  if (mReader->GetVideoCodec() == NESTEGG_CODEC_VP8) {
    vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), data, length, &si);
  } else if (mReader->GetVideoCodec() == NESTEGG_CODEC_VP9) {
    vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), data, length, &si);
  }
  if (aKeyframeSkip && (!si.is_kf || tstamp < aTimeThreshold)) {
    // Skipping to next keyframe...
    a.mParsed++;
    a.mDropped++;
    return true;
  }

  if (aKeyframeSkip && si.is_kf) {
    aKeyframeSkip = false;
  }

  if (vpx_codec_decode(&mVPX, data, length, nullptr, 0)) {
    return false;
  }

  // If the timestamp of the video frame is less than
  // the time threshold required then it is not added
  // to the video queue and won't be displayed.
  if (tstamp < aTimeThreshold) {
    a.mParsed++;
    a.mDropped++;
    return true;
  }

  vpx_codec_iter_t iter = nullptr;
  vpx_image_t *img;

  while ((img = vpx_codec_get_frame(&mVPX, &iter))) {
    NS_ASSERTION(img->fmt == VPX_IMG_FMT_I420, "WebM image format not I420");

    // Chroma shifts are rounded down as per the decoding examples in the SDK
    VideoData::YCbCrBuffer b;
    b.mPlanes[0].mData = img->planes[0];
    b.mPlanes[0].mStride = img->stride[0];
    b.mPlanes[0].mHeight = img->d_h;
    b.mPlanes[0].mWidth = img->d_w;
    b.mPlanes[0].mOffset = b.mPlanes[0].mSkip = 0;

    b.mPlanes[1].mData = img->planes[1];
    b.mPlanes[1].mStride = img->stride[1];
    b.mPlanes[1].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
    b.mPlanes[1].mWidth = (img->d_w + 1) >> img->x_chroma_shift;
    b.mPlanes[1].mOffset = b.mPlanes[1].mSkip = 0;

    b.mPlanes[2].mData = img->planes[2];
    b.mPlanes[2].mStride = img->stride[2];
    b.mPlanes[2].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
    b.mPlanes[2].mWidth = (img->d_w + 1) >> img->x_chroma_shift;
    b.mPlanes[2].mOffset = b.mPlanes[2].mSkip = 0;

    nsIntRect pictureRect = mReader->GetPicture();
    IntRect picture = pictureRect;
    nsIntSize initFrame = mReader->GetInitialFrame();
    if (img->d_w != static_cast<uint32_t>(initFrame.width) ||
        img->d_h != static_cast<uint32_t>(initFrame.height)) {
      // Frame size is different from what the container reports. This is
      // legal in WebM, and we will preserve the ratio of the crop rectangle
      // as it was reported relative to the picture size reported by the
      // container.
      picture.x = (pictureRect.x * img->d_w) / initFrame.width;
      picture.y = (pictureRect.y * img->d_h) / initFrame.height;
      picture.width = (img->d_w * pictureRect.width) / initFrame.width;
      picture.height = (img->d_h * pictureRect.height) / initFrame.height;
    }

    VideoInfo videoInfo = mReader->GetMediaInfo().mVideo;
    RefPtr<VideoData> v = VideoData::Create(videoInfo,
                                            mReader->GetDecoder()->GetImageContainer(),
                                            holder->Offset(),
                                            tstamp,
                                            next_tstamp - tstamp,
                                            b,
                                            si.is_kf,
                                            -1,
                                            picture);
    if (!v) {
      return false;
    }
    a.mParsed++;
    a.mDecoded++;
    NS_ASSERTION(a.mDecoded <= a.mParsed,
                 "Expect only 1 frame per chunk per packet in WebM...");
    mReader->VideoQueue().Push(v);
  }

  return true;
}
} // namespace mozilla

Просмотреть файл

@ -1,41 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(SoftwareWebMVideoDecoder_h_)
#define SoftwareWebMVideoDecoder_h_
#include <stdint.h>
#include "WebMReader.h"
namespace mozilla {
// libvpx-based (software) implementation of the WebMVideoDecoder
// interface used by WebMReader.
class SoftwareWebMVideoDecoder : public WebMVideoDecoder
{
public:
  // Factory used by WebMReader; the caller owns the returned decoder.
  static WebMVideoDecoder* Create(WebMReader* aReader);

  virtual nsresult Init(unsigned int aWidth = 0,
                        unsigned int aHeight = 0) override;

  virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
                                int64_t aTimeThreshold) override;

  virtual void Shutdown() override;

  explicit SoftwareWebMVideoDecoder(WebMReader* aReader);
  ~SoftwareWebMVideoDecoder();

private:
  // Sets up the libvpx context for the reader's codec (VP8/VP9).
  nsresult InitDecoder(unsigned int aWidth, unsigned int aHeight);

  RefPtr<WebMReader> mReader;

  // VPx decoder state
  vpx_codec_ctx_t mVPX;
};
} // namespace mozilla
#endif

Просмотреть файл

@ -8,7 +8,6 @@
#include "MediaDecoderStateMachine.h"
#include "MediaFormatReader.h"
#include "WebMDemuxer.h"
#include "WebMReader.h"
#include "WebMDecoder.h"
#include "VideoUtils.h"
@ -16,11 +15,8 @@ namespace mozilla {
MediaDecoderStateMachine* WebMDecoder::CreateStateMachine()
{
bool useFormatDecoder =
Preferences::GetBool("media.format-reader.webm", true);
RefPtr<MediaDecoderReader> reader = useFormatDecoder ?
static_cast<MediaDecoderReader*>(new MediaFormatReader(this, new WebMDemuxer(GetResource()), GetVideoFrameContainer())) :
new WebMReader(this);
RefPtr<MediaDecoderReader> reader =
new MediaFormatReader(this, new WebMDemuxer(GetResource()), GetVideoFrameContainer());
return new MediaDecoderStateMachine(this, reader);
}

Просмотреть файл

@ -34,7 +34,7 @@ namespace mozilla {
using namespace gfx;
LazyLogModule gWebMDemuxerLog("WebMDemuxer");
extern LazyLogModule gNesteggLog;
LazyLogModule gNesteggLog("Nestegg");
// How far ahead will we look when searching future keyframe. In microseconds.
// This value is based on what appears to be a reasonable value as most webm

Просмотреть файл

@ -1,828 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "nsError.h"
#include "MediaDecoderStateMachine.h"
#include "AbstractMediaDecoder.h"
#include "SoftwareWebMVideoDecoder.h"
#include "nsContentUtils.h"
#include "WebMReader.h"
#include "WebMBufferedParser.h"
#include "gfx2DGlue.h"
#include "Layers.h"
#include "mozilla/Preferences.h"
#include "mozilla/SharedThreadPool.h"
#include <algorithm>
#define VPX_DONT_DEFINE_STDINT_TYPES
#include "vpx/vp8dx.h"
#include "vpx/vpx_decoder.h"
// Un-comment to enable logging of seek bisections.
//#define SEEK_LOGGING
#undef LOG
#include "prprf.h"
#define LOG(type, msg) MOZ_LOG(gMediaDecoderLog, type, msg)
#ifdef SEEK_LOGGING
#define SEEK_LOG(type, msg) MOZ_LOG(gMediaDecoderLog, type, msg)
#else
#define SEEK_LOG(type, msg)
#endif
namespace mozilla {
using namespace gfx;
using namespace layers;
using namespace media;
extern LazyLogModule gMediaDecoderLog;
LazyLogModule gNesteggLog("Nestegg");
// Functions for reading and seeking using MediaResource required for
// nestegg_io. The 'user data' passed to these functions is the
// decoder from which the media resource is obtained.
static int webm_read(void *aBuffer, size_t aLength, void *aUserData)
{
MOZ_ASSERT(aUserData);
MediaResourceIndex* resource =
reinterpret_cast<MediaResourceIndex*>(aUserData);
nsresult rv = NS_OK;
uint32_t bytes = 0;
rv = resource->Read(static_cast<char *>(aBuffer), aLength, &bytes);
bool eof = !bytes;
return NS_FAILED(rv) ? -1 : eof ? 0 : 1;
}
// nestegg seek callback. Returns 0 on success, -1 on failure, as
// nestegg_io expects.
static int webm_seek(int64_t aOffset, int aWhence, void *aUserData)
{
  MOZ_ASSERT(aUserData);
  MediaResourceIndex* resource =
    reinterpret_cast<MediaResourceIndex*>(aUserData);
  if (NS_FAILED(resource->Seek(aWhence, aOffset))) {
    return -1;
  }
  return 0;
}
// nestegg tell callback: reports the current read position of the
// underlying resource.
static int64_t webm_tell(void *aUserData)
{
  MOZ_ASSERT(aUserData);
  return reinterpret_cast<MediaResourceIndex*>(aUserData)->Tell();
}
// nestegg logging callback: prefixes the message with the context pointer
// and a short severity tag, then forwards it to gNesteggLog at Debug level.
static void webm_log(nestegg * context,
                     unsigned int severity,
                     char const * format, ...)
{
  // Bail out early if the message would be discarded anyway.
  if (!MOZ_LOG_TEST(gNesteggLog, LogLevel::Debug)) {
    return;
  }

  va_list args;
  char msg[256];
  const char * sevStr;

  // Map nestegg's severity constants to a three-letter tag.
  switch(severity) {
    case NESTEGG_LOG_DEBUG:
      sevStr = "DBG";
      break;
    case NESTEGG_LOG_INFO:
      sevStr = "INF";
      break;
    case NESTEGG_LOG_WARNING:
      sevStr = "WRN";
      break;
    case NESTEGG_LOG_ERROR:
      sevStr = "ERR";
      break;
    case NESTEGG_LOG_CRITICAL:
      sevStr = "CRT";
      break;
    default:
      sevStr = "UNK";
      break;
  }

  va_start(args, format);

  // Write the prefix, then append the formatted message into whatever
  // space remains in the fixed-size buffer.
  PR_snprintf(msg, sizeof(msg), "%p [Nestegg-%s] ", context, sevStr);
  PR_vsnprintf(msg+strlen(msg), sizeof(msg)-strlen(msg), format, args);
  MOZ_LOG(gNesteggLog, LogLevel::Debug, (msg));

  va_end(args);
}
// Constructs a reader over aDecoder's media resource. All track state
// starts as "not found"; real setup happens in RetrieveWebMMetadata().
WebMReader::WebMReader(AbstractMediaDecoder* aDecoder)
  : MediaDecoderReader(aDecoder)
  , mContext(nullptr)
  , mVideoTrack(0)
  , mAudioTrack(0)
  , mAudioStartUsec(-1) // -1 marks "first audio chunk not yet seen".
  , mAudioFrames(0)
  , mSeekPreroll(0)
  , mLastVideoFrameTime(0)
  , mAudioCodec(-1)
  , mVideoCodec(-1)
  , mLayersBackendType(layers::LayersBackend::LAYERS_NONE)
  , mHasVideo(false)
  , mHasAudio(false)
  , mResource(aDecoder->GetResource())
{
  MOZ_COUNT_CTOR(WebMReader);
}
WebMReader::~WebMReader()
{
  // Release the nestegg context and any queued, undecoded packets.
  Cleanup();
  mVideoPackets.Reset();
  mAudioPackets.Reset();
  // Shutdown() must have already released the per-track decoders.
  MOZ_ASSERT(!mAudioDecoder);
  MOZ_ASSERT(!mVideoDecoder);
  MOZ_COUNT_DTOR(WebMReader);
}
RefPtr<ShutdownPromise>
WebMReader::Shutdown()
{
  // Shut down and drop the per-track decoders before the base class
  // tears down the reader itself.
  if (mAudioDecoder) {
    mAudioDecoder->Shutdown();
    mAudioDecoder = nullptr;
  }

  if (mVideoDecoder) {
    mVideoDecoder->Shutdown();
    mVideoDecoder = nullptr;
  }

  return MediaDecoderReader::Shutdown();
}
nsresult WebMReader::Init()
{
  // Buffered-range index; used by GetBuffered() and as a seek fallback.
  mBufferedState = new WebMBufferedState;
  return NS_OK;
}
// Caches the compositor's layers backend type so decoders can decide
// whether hardware-accelerated video decoding is worthwhile. No-op for
// audio-only content or when the owner/layer manager is unavailable.
void WebMReader::InitLayersBackendType()
{
  if (!IsVideoContentType(GetDecoder()->GetResource()->GetContentType())) {
    // Not playing video, we don't care about the layers backend type.
    return;
  }
  // Extract the layer manager backend type so that platform decoders
  // can determine whether it's worthwhile using hardware accelerated
  // video decoding.
  MediaDecoderOwner* owner = mDecoder->GetOwner();
  if (!owner) {
    NS_WARNING("WebMReader without a decoder owner, can't get HWAccel");
    return;
  }

  dom::HTMLMediaElement* element = owner->GetMediaElement();
  NS_ENSURE_TRUE_VOID(element);

  RefPtr<LayerManager> layerManager =
    nsContentUtils::LayerManagerForDocument(element->OwnerDoc());
  NS_ENSURE_TRUE_VOID(layerManager);

  mLayersBackendType = layerManager->GetCompositorBackendType();
}
// Resets decode state after a seek: clears audio timing bookkeeping,
// resets the base reader and the audio decoder, and drops all demuxed but
// undecoded packets. Returns NS_ERROR_FAILURE if the base reset failed.
nsresult WebMReader::ResetDecode()
{
  // Forget audio timing so the next chunk re-establishes the start time.
  mAudioFrames = 0;
  mAudioStartUsec = -1;

  const nsresult baseResult = MediaDecoderReader::ResetDecode();

  if (mAudioDecoder) {
    mAudioDecoder->ResetDecode();
  }

  mVideoPackets.Reset();
  mAudioPackets.Reset();

  return NS_FAILED(baseResult) ? NS_ERROR_FAILURE : NS_OK;
}
// Destroys the nestegg demuxer context, if any. Safe to call repeatedly.
void WebMReader::Cleanup()
{
  if (!mContext) {
    return;
  }
  nestegg_destroy(mContext);
  mContext = nullptr;
}
RefPtr<MediaDecoderReader::MetadataPromise>
WebMReader::AsyncReadMetadata()
{
  // Parse container metadata; reject if parsing fails or no usable
  // audio/video track was found.
  RefPtr<MetadataHolder> metadata = new MetadataHolder();

  if (NS_FAILED(RetrieveWebMMetadata(&metadata->mInfo)) ||
      !metadata->mInfo.HasValidMedia()) {
    return MetadataPromise::CreateAndReject(ReadMetadataFailureReason::METADATA_ERROR,
                                            __func__);
  }

  return MetadataPromise::CreateAndResolve(metadata, __func__);
}
// Parses the WebM container with nestegg and fills *aInfo with the
// duration and the first usable audio and video track, creating and
// initializing per-track decoders as tracks are found. On any malformed
// input, tears down the nestegg context via Cleanup() and returns
// NS_ERROR_FAILURE.
nsresult
WebMReader::RetrieveWebMMetadata(MediaInfo* aInfo)
{
  MOZ_ASSERT(OnTaskQueue());

  // Hook nestegg's I/O callbacks up to our MediaResourceIndex.
  nestegg_io io;
  io.read = webm_read;
  io.seek = webm_seek;
  io.tell = webm_tell;
  io.userdata = &mResource;
  int r = nestegg_init(&mContext, io, &webm_log, -1);
  if (r == -1) {
    return NS_ERROR_FAILURE;
  }

  uint64_t duration = 0;
  r = nestegg_duration(mContext, &duration);
  if (r == 0) {
    mInfo.mMetadataDuration.emplace(TimeUnit::FromNanoseconds(duration));
  }

  unsigned int ntracks = 0;
  r = nestegg_track_count(mContext, &ntracks);
  if (r == -1) {
    Cleanup();
    return NS_ERROR_FAILURE;
  }

  // Pick the first decodable video track and the first audio track.
  for (uint32_t track = 0; track < ntracks; ++track) {
    int id = nestegg_track_codec_id(mContext, track);
    if (id == -1) {
      Cleanup();
      return NS_ERROR_FAILURE;
    }
    int type = nestegg_track_type(mContext, track);
    if (!mHasVideo && type == NESTEGG_TRACK_VIDEO &&
        mDecoder->GetImageContainer()) {
      nestegg_video_params params;
      r = nestegg_track_video_params(mContext, track, &params);
      if (r == -1) {
        Cleanup();
        return NS_ERROR_FAILURE;
      }

      mVideoCodec = nestegg_track_codec_id(mContext, track);

      if (!mVideoDecoder) {
        mVideoDecoder = SoftwareWebMVideoDecoder::Create(this);
      }

      if (!mVideoDecoder ||
          NS_FAILED(mVideoDecoder->Init(params.display_width,
                                        params.display_height))) {
        Cleanup();
        return NS_ERROR_FAILURE;
      }

      // Picture region, taking into account cropping, before scaling
      // to the display size.
      unsigned int cropH = params.crop_right + params.crop_left;
      unsigned int cropV = params.crop_bottom + params.crop_top;
      nsIntRect pictureRect(params.crop_left,
                            params.crop_top,
                            params.width - cropH,
                            params.height - cropV);

      // If the cropping data appears invalid then use the frame data
      if (pictureRect.width <= 0 ||
          pictureRect.height <= 0 ||
          pictureRect.x < 0 ||
          pictureRect.y < 0) {
        pictureRect.x = 0;
        pictureRect.y = 0;
        pictureRect.width = params.width;
        pictureRect.height = params.height;
      }

      // Validate the container-reported frame and pictureRect sizes. This
      // ensures that our video frame creation code doesn't overflow.
      nsIntSize displaySize(params.display_width, params.display_height);
      nsIntSize frameSize(params.width, params.height);
      if (!IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
        // Video track's frame sizes will overflow. Ignore the video track.
        continue;
      }

      mVideoTrack = track;
      mHasVideo = true;

      mInfo.mVideo.mDisplay = displaySize;
      mPicture = pictureRect;
      mInitialFrame = frameSize;

      switch (params.stereo_mode) {
        case NESTEGG_VIDEO_MONO:
          mInfo.mVideo.mStereoMode = StereoMode::MONO;
          break;
        case NESTEGG_VIDEO_STEREO_LEFT_RIGHT:
          mInfo.mVideo.mStereoMode = StereoMode::LEFT_RIGHT;
          break;
        case NESTEGG_VIDEO_STEREO_BOTTOM_TOP:
          mInfo.mVideo.mStereoMode = StereoMode::BOTTOM_TOP;
          break;
        case NESTEGG_VIDEO_STEREO_TOP_BOTTOM:
          mInfo.mVideo.mStereoMode = StereoMode::TOP_BOTTOM;
          break;
        case NESTEGG_VIDEO_STEREO_RIGHT_LEFT:
          mInfo.mVideo.mStereoMode = StereoMode::RIGHT_LEFT;
          break;
      }
    } else if (!mHasAudio && type == NESTEGG_TRACK_AUDIO) {
      nestegg_audio_params params;
      r = nestegg_track_audio_params(mContext, track, &params);
      if (r == -1) {
        Cleanup();
        return NS_ERROR_FAILURE;
      }

      mAudioTrack = track;
      mHasAudio = true;
      mAudioCodec = nestegg_track_codec_id(mContext, track);
      // Convert the container codec delay to microseconds.
      mCodecDelay = params.codec_delay / NS_PER_USEC;
      mSeekPreroll = params.seek_preroll;
      if (mAudioCodec == NESTEGG_CODEC_VORBIS) {
        mAudioDecoder = new VorbisDecoder(this);
      } else if (mAudioCodec == NESTEGG_CODEC_OPUS) {
        mAudioDecoder = new OpusDecoder(this);
      } else {
        Cleanup();
        return NS_ERROR_FAILURE;
      }

      if (!mAudioDecoder || NS_FAILED(mAudioDecoder->Init())) {
        Cleanup();
        return NS_ERROR_FAILURE;
      }

      // Feed every codec-private header blob to the audio decoder.
      unsigned int nheaders = 0;
      r = nestegg_track_codec_data_count(mContext, track, &nheaders);
      if (r == -1) {
        Cleanup();
        return NS_ERROR_FAILURE;
      }

      for (uint32_t header = 0; header < nheaders; ++header) {
        unsigned char* data = 0;
        size_t length = 0;
        r = nestegg_track_codec_data(mContext, track, header, &data, &length);
        if (r == -1) {
          Cleanup();
          return NS_ERROR_FAILURE;
        }
        if (NS_FAILED(mAudioDecoder->DecodeHeader(data, length))) {
          Cleanup();
          return NS_ERROR_FAILURE;
        }
      }
      if (NS_FAILED(mAudioDecoder->FinishInit(mInfo.mAudio))) {
        Cleanup();
        return NS_ERROR_FAILURE;
      }
    }
  }

  // Seekability is determined by whether the file has cues.
  mInfo.mMediaSeekable = nestegg_has_cues(mContext);

  *aInfo = mInfo;

  return NS_OK;
}
// Decodes every data chunk in one demuxed audio packet, detecting
// timestamp gaps in the audio stream and forwarding the chunks to
// mAudioDecoder. Returns false on demuxer or decoder failure (the latter
// also sets mHitAudioDecodeError).
bool WebMReader::DecodeAudioPacket(NesteggPacketHolder* aHolder)
{
  MOZ_ASSERT(OnTaskQueue());

  int r = 0;
  unsigned int count = 0;
  r = nestegg_packet_count(aHolder->Packet(), &count);
  if (r == -1) {
    return false;
  }

  int64_t tstamp = aHolder->Timestamp();
  if (mAudioStartUsec == -1) {
    // This is the first audio chunk. Assume the start time of our decode
    // is the start of this chunk.
    mAudioStartUsec = tstamp;
  }
  // If there's a gap between the start of this audio chunk and the end of
  // the previous audio chunk, we need to increment the packet count so that
  // the vorbis decode doesn't use data from before the gap to help decode
  // from after the gap.
  CheckedInt64 tstamp_frames = UsecsToFrames(tstamp, mInfo.mAudio.mRate);
  CheckedInt64 decoded_frames = UsecsToFrames(mAudioStartUsec,
                                              mInfo.mAudio.mRate);
  if (!tstamp_frames.isValid() || !decoded_frames.isValid()) {
    NS_WARNING("Int overflow converting WebM times to frames");
    return false;
  }
  decoded_frames += mAudioFrames;
  if (!decoded_frames.isValid()) {
    NS_WARNING("Int overflow adding decoded_frames");
    return false;
  }
  if (tstamp_frames.value() > decoded_frames.value()) {
#ifdef DEBUG
    int64_t gap_frames = tstamp_frames.value() - decoded_frames.value();
    CheckedInt64 usecs = FramesToUsecs(gap_frames, mInfo.mAudio.mRate);
    LOG(LogLevel::Debug, ("WebMReader detected gap of %lld, %lld frames, in audio",
                          usecs.isValid() ? usecs.value() : -1,
                          gap_frames));
#endif
    // Gap detected: restart frame accounting from this chunk.
    mAudioStartUsec = tstamp;
    mAudioFrames = 0;
  }

  int32_t total_frames = 0;
  for (uint32_t i = 0; i < count; ++i) {
    unsigned char* data;
    size_t length;
    r = nestegg_packet_data(aHolder->Packet(), i, &data, &length);
    if (r == -1) {
      return false;
    }
    // DiscardPadding is optional; absence leaves it zero.
    int64_t discardPadding = 0;
    (void) nestegg_packet_discard_padding(aHolder->Packet(), &discardPadding);

    if (!mAudioDecoder->Decode(data, length, aHolder->Offset(), tstamp, discardPadding, &total_frames)) {
      mHitAudioDecodeError = true;
      return false;
    }
  }

  mAudioFrames += total_frames;

  return true;
}
// Returns the next demuxed packet for aTrackType, or nullptr when the
// demuxer is exhausted. Packets belonging to the other track that are
// encountered along the way are buffered on that track's queue.
RefPtr<NesteggPacketHolder> WebMReader::NextPacket(TrackType aTrackType)
{
  const bool wantVideo = aTrackType == VIDEO;

  // Queue for packets of the type we are NOT interested in.
  WebMPacketQueue& otherPackets = wantVideo ? mAudioPackets : mVideoPackets;
  // Queue for the requested packet type.
  WebMPacketQueue& packets = wantVideo ? mVideoPackets : mAudioPackets;
  // Whether each stream type is present and should be played back.
  const bool hasType = wantVideo ? mHasVideo : mHasAudio;
  const bool hasOtherType = wantVideo ? mHasAudio : mHasVideo;
  // Track numbers for the requested stream and the other stream.
  const uint32_t ourTrack = wantVideo ? mVideoTrack : mAudioTrack;
  const uint32_t otherTrack = wantVideo ? mAudioTrack : mVideoTrack;

  // Serve a previously buffered packet if one is waiting.
  if (packets.GetSize() > 0) {
    return packets.PopFront();
  }

  // Otherwise demux until we hit a packet on our track, buffering any
  // packets that belong to the other track along the way.
  while (true) {
    RefPtr<NesteggPacketHolder> holder = DemuxPacket();
    if (!holder) {
      return nullptr;
    }

    if (hasOtherType && otherTrack == holder->Track()) {
      // Save the packet for when we want these packets
      otherPackets.Push(holder);
      continue;
    }

    // The packet is for the track we want to play
    if (hasType && ourTrack == holder->Track()) {
      return holder;
    }
  }
}
// Reads the next packet from the nestegg context and wraps it in a
// NesteggPacketHolder annotated with its track, stream offset and
// keyframe flag. Returns nullptr at end of stream or on error.
RefPtr<NesteggPacketHolder>
WebMReader::DemuxPacket()
{
  nestegg_packet* packet;
  int r = nestegg_read_packet(mContext, &packet);
  if (r <= 0) {
    return nullptr;
  }

  unsigned int track = 0;
  r = nestegg_packet_track(packet, &track);
  if (r == -1) {
    return nullptr;
  }

  // Figure out if this is a keyframe.
  bool isKeyframe = false;
  if (track == mAudioTrack) {
    // Audio packets are always treated as keyframes.
    isKeyframe = true;
  } else if (track == mVideoTrack) {
    unsigned char* data;
    size_t length;
    r = nestegg_packet_data(packet, 0, &data, &length);
    if (r == -1) {
      return nullptr;
    }
    // Peek at the VPx stream info (si.is_kf) without a full decode.
    vpx_codec_stream_info_t si;
    memset(&si, 0, sizeof(si));
    si.sz = sizeof(si);
    if (mVideoCodec == NESTEGG_CODEC_VP8) {
      vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), data, length, &si);
    } else if (mVideoCodec == NESTEGG_CODEC_VP9) {
      vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), data, length, &si);
    }
    isKeyframe = si.is_kf;
  }

  int64_t offset = mResource.Tell();
  RefPtr<NesteggPacketHolder> holder = new NesteggPacketHolder();
  if (!holder->Init(packet, offset, track, isKeyframe)) {
    return nullptr;
  }

  return holder;
}
// Demuxes the next audio packet and hands it to the audio decoder.
// Returns false when no packet is available or decoding fails.
bool WebMReader::DecodeAudioData()
{
  MOZ_ASSERT(OnTaskQueue());

  RefPtr<NesteggPacketHolder> holder = NextPacket(AUDIO);
  return holder ? DecodeAudioPacket(holder) : false;
}
// Moves video packets with timestamps before aEndTime into aOutput.
// Returns true once a packet at or past aEndTime is reached (that packet
// is pushed back onto the video queue); returns false if the demuxer runs
// out of packets first.
bool WebMReader::FilterPacketByTime(int64_t aEndTime, WebMPacketQueue& aOutput)
{
  for (RefPtr<NesteggPacketHolder> holder = NextPacket(VIDEO); holder;
       holder = NextPacket(VIDEO)) {
    if (holder->Timestamp() >= aEndTime) {
      // Reached the boundary; leave this packet for the next consumer.
      PushVideoPacket(holder);
      return true;
    }
    aOutput.PushFront(holder);
  }
  return false;
}
// Returns the timestamp of the first keyframe at or beyond aTimeThreshold,
// or -1 if none exists. All inspected packets are restored to the video
// queue before returning, so this is a pure look-ahead.
int64_t WebMReader::GetNextKeyframeTime(int64_t aTimeThreshold)
{
  WebMPacketQueue skipPacketQueue;

  // Put every packet we pulled aside back onto the video packet queue.
  auto restorePackets = [&]() {
    uint32_t size = skipPacketQueue.GetSize();
    for (uint32_t i = 0; i < size; ++i) {
      RefPtr<NesteggPacketHolder> packetHolder = skipPacketQueue.PopFront();
      PushVideoPacket(packetHolder);
    }
  };

  if (!FilterPacketByTime(aTimeThreshold, skipPacketQueue)) {
    // Ran out of packets before reaching the threshold; restore them and
    // report that no keyframe was found.
    restorePackets();
    return -1;
  }

  // Scan forward for the first keyframe.
  bool foundKeyframe = false;
  int64_t keyframeTime = -1;
  while (!foundKeyframe) {
    RefPtr<NesteggPacketHolder> holder = NextPacket(VIDEO);
    if (!holder) {
      break;
    }
    if (holder->IsKeyframe()) {
      foundKeyframe = true;
      keyframeTime = holder->Timestamp();
    }
    skipPacketQueue.PushFront(holder);
  }

  restorePackets();
  return keyframeTime;
}
bool WebMReader::ShouldSkipVideoFrame(int64_t aTimeThreshold)
{
  // Only skip if a keyframe actually exists beyond the threshold;
  // GetNextKeyframeTime() returns -1 when none was found.
  return GetNextKeyframeTime(aTimeThreshold) != -1;
}
// Decodes the next video frame via the active video decoder. Clears
// aKeyframeSkip unless we are skipping and a later keyframe exists to
// skip toward.
bool WebMReader::DecodeVideoFrame(bool &aKeyframeSkip, int64_t aTimeThreshold)
{
  // Equivalent to !(aKeyframeSkip && ShouldSkipVideoFrame(...)); the
  // short-circuit keeps ShouldSkipVideoFrame from running when the skip
  // flag is already clear.
  if (!aKeyframeSkip || !ShouldSkipVideoFrame(aTimeThreshold)) {
    LOG(LogLevel::Verbose, ("Reader [%p]: set the aKeyframeSkip to false.",this));
    aKeyframeSkip = false;
  }
  return mVideoDecoder->DecodeVideoFrame(aKeyframeSkip, aTimeThreshold);
}
void WebMReader::PushVideoPacket(NesteggPacketHolder* aItem)
{
  // Return a packet to the head of the video queue so the next
  // NextPacket(VIDEO) call sees it first.
  mVideoPackets.PushFront(aItem);
}
// Performs the seek synchronously via SeekInternal() and wraps the result
// in a SeekPromise: resolved with the requested target on success,
// rejected with the failure code otherwise.
RefPtr<MediaDecoderReader::SeekPromise>
WebMReader::Seek(int64_t aTarget, int64_t aEndTime)
{
  nsresult rv = SeekInternal(aTarget);
  if (NS_SUCCEEDED(rv)) {
    return SeekPromise::CreateAndResolve(aTarget, __func__);
  }
  return SeekPromise::CreateAndReject(rv, __func__);
}
// Seeks the demuxer to aTarget (usecs), backing the target off by the
// track's seek preroll so audio can settle before the requested time.
// Tries nestegg's cue-based track seek first and falls back to our own
// buffered-range index (mBufferedState) when that fails.
nsresult WebMReader::SeekInternal(int64_t aTarget)
{
  MOZ_ASSERT(OnTaskQueue());
  NS_ENSURE_TRUE(HaveStartTime(), NS_ERROR_FAILURE);
  if (mVideoDecoder) {
    nsresult rv = mVideoDecoder->Flush();
    NS_ENSURE_SUCCESS(rv, rv);
  }

  LOG(LogLevel::Debug, ("Reader [%p] for Decoder [%p]: About to seek to %fs",
                        this, mDecoder, double(aTarget) / USECS_PER_S));

  if (NS_FAILED(ResetDecode())) {
    return NS_ERROR_FAILURE;
  }
  uint32_t trackToSeek = mHasVideo ? mVideoTrack : mAudioTrack;
  // nestegg works in nanoseconds.
  uint64_t target = aTarget * NS_PER_USEC;

  if (mSeekPreroll) {
    uint64_t startTime = uint64_t(StartTime()) * NS_PER_USEC;
    // Clamp so the preroll adjustment never seeks before the stream start.
    if (target < mSeekPreroll || target - mSeekPreroll < startTime) {
      target = startTime;
    } else {
      target -= mSeekPreroll;
    }
    LOG(LogLevel::Debug,
        ("Reader [%p] SeekPreroll: %f StartTime: %f AdjustedTarget: %f",
         this, double(mSeekPreroll) / NS_PER_S,
         double(startTime) / NS_PER_S, double(target) / NS_PER_S));
  }
  int r = nestegg_track_seek(mContext, trackToSeek, target);
  if (r != 0) {
    LOG(LogLevel::Debug, ("Reader [%p]: track_seek for track %u failed, r=%d",
                          this, trackToSeek, r));

    // Try seeking directly based on cluster information in memory.
    int64_t offset = 0;
    bool rv = mBufferedState->GetOffsetForTime(target, &offset);
    if (!rv) {
      return NS_ERROR_FAILURE;
    }

    r = nestegg_offset_seek(mContext, offset);
    LOG(LogLevel::Debug, ("Reader [%p]: attempted offset_seek to %lld r=%d",
                          this, offset, r));
    if (r != 0) {
      return NS_ERROR_FAILURE;
    }
  }
  return NS_OK;
}
// Computes the currently buffered time ranges by mapping the resource's
// cached byte ranges to times via mBufferedState.
media::TimeIntervals WebMReader::GetBuffered()
{
  MOZ_ASSERT(OnTaskQueue());
  // Buffered ranges are reported relative to the start time, so nothing
  // meaningful can be computed before it is known.
  if (!HaveStartTime()) {
    return media::TimeIntervals();
  }
  AutoPinned<MediaResource> resource(mDecoder->GetResource());
  media::TimeIntervals buffered;
  // Special case completely cached files. This also handles local files.
  if (mContext && resource->IsDataCachedToEndOfResource(0)) {
    uint64_t duration = 0;
    if (nestegg_duration(mContext, &duration) == 0) {
      buffered +=
        media::TimeInterval(media::TimeUnit::FromSeconds(0),
                            media::TimeUnit::FromSeconds(duration / NS_PER_S));
      return buffered;
    }
  }
  // Either the file is not fully cached, or we couldn't find a duration in
  // the WebM bitstream.
  MediaByteRangeSet ranges;
  nsresult res = resource->GetCachedRanges(ranges);
  NS_ENSURE_SUCCESS(res, media::TimeIntervals::Invalid());
  for (uint32_t index = 0; index < ranges.Length(); index++) {
    uint64_t start, end;
    bool rv = mBufferedState->CalculateBufferedForRange(ranges[index].mStart,
                                                        ranges[index].mEnd,
                                                        &start, &end);
    if (rv) {
      // start/end are in nanoseconds; rebase them on the stream start time.
      int64_t startOffset = StartTime() * NS_PER_USEC;
      NS_ASSERTION(startOffset >= 0 && uint64_t(startOffset) <= start,
                   "startOffset negative or larger than start time");
      if (!(startOffset >= 0 && uint64_t(startOffset) <= start)) {
        startOffset = 0;
      }
      double startTime = (start - startOffset) / NS_PER_S;
      double endTime = (end - startOffset) / NS_PER_S;
      // If this range extends to the end of the file, the true end time
      // is the file's duration.
      if (mContext &&
          resource->IsDataCachedToEndOfResource(ranges[index].mStart)) {
        uint64_t duration = 0;
        if (nestegg_duration(mContext, &duration) == 0) {
          endTime = duration / NS_PER_S;
        }
      }
      buffered += media::TimeInterval(media::TimeUnit::FromSeconds(startTime),
                                      media::TimeUnit::FromSeconds(endTime));
    }
  }
  return buffered;
}
void WebMReader::NotifyDataArrivedInternal()
{
MOZ_ASSERT(OnTaskQueue());
AutoPinned<MediaResource> resource(mDecoder->GetResource());
MediaByteRangeSet byteRanges;
nsresult rv = resource->GetCachedRanges(byteRanges);
if (NS_FAILED(rv)) {
return;
}
for (auto& range : byteRanges) {
RefPtr<MediaByteBuffer> bytes =
resource->MediaReadAt(range.mStart, range.Length());
NS_ENSURE_TRUE_VOID(bytes);
mBufferedState->NotifyDataArrived(bytes->Elements(), bytes->Length(), range.mStart);
}
}
// Returns the codec ID of the video track.
int WebMReader::GetVideoCodec()
{
  return mVideoCodec;
}
// Returns the picture region, relative to the initial frame size.
nsIntRect WebMReader::GetPicture()
{
  return mPicture;
}
// Returns the size of the frame initially present in the stream.
nsIntSize WebMReader::GetInitialFrame()
{
  return mInitialFrame;
}
// Returns the timestamp of the last decoded video frame, in nanoseconds.
int64_t WebMReader::GetLastVideoFrameTime()
{
  return mLastVideoFrameTime;
}
// Records the timestamp (nanoseconds) of the most recently decoded video
// frame; used to derive the next frame's duration.
void WebMReader::SetLastVideoFrameTime(int64_t aFrameTime)
{
  mLastVideoFrameTime = aFrameTime;
}
} // namespace mozilla

Просмотреть файл

@ -1,213 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(WebMReader_h_)
#define WebMReader_h_
#include <stdint.h>
#include "FlushableTaskQueue.h"
#include "MediaDecoderReader.h"
#include "MediaResource.h"
#include "nsAutoRef.h"
#include "nestegg/nestegg.h"
#define VPX_DONT_DEFINE_STDINT_TYPES
#include "vpx/vpx_codec.h"
#include "mozilla/layers/LayersTypes.h"
#include "NesteggPacketHolder.h"
namespace mozilla {
static const unsigned NS_PER_USEC = 1000;
static const double NS_PER_S = 1e9;
typedef TrackInfo::TrackType TrackType;
class WebMBufferedState;
class WebMPacketQueue;
class WebMReader;
// Class to handle various video decode paths. Implementations wrap a
// concrete decoder backend behind this common interface.
class WebMVideoDecoder
{
public:
  // Prepares the decoder; frame dimensions may be supplied when known.
  virtual nsresult Init(unsigned int aWidth = 0, unsigned int aHeight = 0) = 0;
  // Discards in-flight decoder state; the default is a no-op.
  virtual nsresult Flush() { return NS_OK; }
  virtual void Shutdown() = 0;
  // Decodes the next video frame; see WebMReader::DecodeVideoFrame for the
  // aKeyframeSkip/aTimeThreshold contract.
  virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
                                int64_t aTimeThreshold) = 0;
  WebMVideoDecoder() {}
  virtual ~WebMVideoDecoder() {}
};
// Class to handle various audio decode paths.
class WebMAudioDecoder
{
public:
  virtual nsresult Init() = 0;
  virtual void Shutdown() = 0;
  // Drops accumulated decoder state (e.g. when the reader resets decode).
  virtual nsresult ResetDecode() = 0;
  // Parses one codec setup header packet extracted from the container.
  virtual nsresult DecodeHeader(const unsigned char* aData, size_t aLength) = 0;
  // Completes initialization once all headers are parsed, populating aInfo.
  virtual nsresult FinishInit(AudioInfo& aInfo) = 0;
  // Decodes one compressed audio packet. aTstampUsecs is the packet
  // timestamp in microseconds; decoded frame count is accumulated into
  // aTotalFrames.
  virtual bool Decode(const unsigned char* aData, size_t aLength,
                      int64_t aOffset, uint64_t aTstampUsecs,
                      int64_t aDiscardPadding, int32_t* aTotalFrames) = 0;
  virtual ~WebMAudioDecoder() {}
};
// MediaDecoderReader implementation for WebM media, demuxing via libnestegg
// and delegating decode to WebMAudioDecoder/WebMVideoDecoder instances.
class WebMReader : public MediaDecoderReader
{
public:
  explicit WebMReader(AbstractMediaDecoder* aDecoder);

protected:
  ~WebMReader();

public:
  // Returns a pointer to the decoder.
  AbstractMediaDecoder* GetDecoder()
  {
    return mDecoder;
  }

  MediaInfo GetMediaInfo() { return mInfo; }

  virtual RefPtr<ShutdownPromise> Shutdown() override;
  virtual nsresult Init() override;
  virtual nsresult ResetDecode() override;
  virtual bool DecodeAudioData() override;
  virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
                                int64_t aTimeThreshold) override;
  virtual RefPtr<MetadataPromise> AsyncReadMetadata() override;

  virtual RefPtr<SeekPromise>
  Seek(int64_t aTime, int64_t aEndTime) override;

  virtual media::TimeIntervals GetBuffered() override;

  // Value passed to NextPacket to determine if we are reading a video or an
  // audio packet.
  enum TrackType {
    VIDEO = 0,
    AUDIO = 1
  };

  // Read a packet from the nestegg file. Returns nullptr if all packets for
  // the particular track have been read. Pass VIDEO or AUDIO to indicate the
  // type of the packet we want to read.
  RefPtr<NesteggPacketHolder> NextPacket(TrackType aTrackType);

  // Pushes a packet to the front of the video packet queue.
  virtual void PushVideoPacket(NesteggPacketHolder* aItem);

  // Accessors for track metadata extracted from the container.
  int GetVideoCodec();
  nsIntRect GetPicture();
  nsIntSize GetInitialFrame();
  int64_t GetLastVideoFrameTime();
  void SetLastVideoFrameTime(int64_t aFrameTime);
  layers::LayersBackend GetLayersBackendType() { return mLayersBackendType; }
  uint64_t GetCodecDelay() { return mCodecDelay; }

protected:
  virtual void NotifyDataArrivedInternal() override;

  // Decode a nestegg packet of audio data. Push the audio data on the
  // audio queue. Returns true when there's more audio to decode,
  // false if the audio is finished, end of file has been reached,
  // or an un-recoverable read error has occurred. The reader's monitor
  // must be held during this call. The caller is responsible for freeing
  // aPacket.
  bool DecodeAudioPacket(NesteggPacketHolder* aHolder);

  // Release context and set to null. Called when an error occurs during
  // reading metadata or destruction of the reader itself.
  void Cleanup();

  // Performs the demuxer-level seek; see the .cpp for the fallback logic.
  virtual nsresult SeekInternal(int64_t aTime);

  // Initializes mLayersBackendType if possible.
  void InitLayersBackendType();

  // Returns true when a keyframe exists past aTimeThreshold, i.e. a skip
  // request can still be honored.
  bool ShouldSkipVideoFrame(int64_t aTimeThreshold);

private:
  nsresult RetrieveWebMMetadata(MediaInfo* aInfo);

  // Get the timestamp of the keyframe greater than aTimeThreshold.
  int64_t GetNextKeyframeTime(int64_t aTimeThreshold);

  // Pushes the packets whose timestamp is less than aEndTime into aOutput.
  // Returns false if we reach the end of the stream or something goes wrong.
  bool FilterPacketByTime(int64_t aEndTime, WebMPacketQueue& aOutput);

  // Internal method that demuxes the next packet from the stream. The caller
  // is responsible for making sure it doesn't get lost.
  RefPtr<NesteggPacketHolder> DemuxPacket();

  // libnestegg context for webm container. Access on state machine thread
  // or decoder thread only.
  nestegg* mContext;

  nsAutoPtr<WebMAudioDecoder> mAudioDecoder;
  nsAutoPtr<WebMVideoDecoder> mVideoDecoder;

  // Queue of video and audio packets that have been read but not decoded.
  // These must only be accessed from the decode thread.
  WebMPacketQueue mVideoPackets;
  WebMPacketQueue mAudioPackets;

  // Index of video and audio track to play
  uint32_t mVideoTrack;
  uint32_t mAudioTrack;

  // Time in microseconds of the start of the first audio frame we've decoded.
  int64_t mAudioStartUsec;

  // Number of audio frames we've decoded since decoding began at
  // mAudioStartUsec.
  uint64_t mAudioFrames;

  // Number of microseconds that must be discarded from the start of the
  // stream.
  uint64_t mCodecDelay;

  // Nanoseconds to discard after seeking.
  uint64_t mSeekPreroll;

  // Calculate the frame duration from the last decodeable frame using the
  // previous frame's timestamp. In nanoseconds.
  int64_t mLastVideoFrameTime;

  // Parser state and computed offset-time mappings. Shared by multiple
  // readers when decoder has been cloned. Main thread only.
  RefPtr<WebMBufferedState> mBufferedState;

  // Size of the frame initially present in the stream. The picture region
  // is defined as a ratio relative to this.
  nsIntSize mInitialFrame;

  // Picture region, as relative to the initial frame size.
  nsIntRect mPicture;

  // Codec ID of audio track
  int mAudioCodec;
  // Codec ID of video track
  int mVideoCodec;

  layers::LayersBackend mLayersBackendType;

  // Booleans to indicate if we have audio and/or video data
  bool mHasVideo;
  bool mHasAudio;

  MediaResourceIndex mResource;
};
} // namespace mozilla
#endif

Просмотреть файл

@ -6,20 +6,15 @@
EXPORTS += [
'NesteggPacketHolder.h',
'SoftwareWebMVideoDecoder.h',
'WebMBufferedParser.h',
'WebMDecoder.h',
'WebMDemuxer.h',
'WebMReader.h',
]
UNIFIED_SOURCES += [
'AudioDecoder.cpp',
'SoftwareWebMVideoDecoder.cpp',
'WebMBufferedParser.cpp',
'WebMDecoder.cpp',
'WebMDemuxer.cpp',
'WebMReader.cpp',
]
if CONFIG['MOZ_WEBM_ENCODER']:

Просмотреть файл

@ -136,9 +136,12 @@ public:
TrackID aId,
StreamTime aDesiredTime) override
{
NS_WARN_IF_FALSE(!aSource->FindTrack(aId) ||
#ifdef DEBUG
StreamBuffer::Track* data = aSource->FindTrack(aId);
NS_WARN_IF_FALSE(!data || data->IsEnded() ||
aDesiredTime <= aSource->GetEndOfAppendedData(aId),
"MediaEngineDefaultAudioSource data underrun");
#endif
}
virtual bool IsFake() override {

Просмотреть файл

@ -286,6 +286,11 @@ NS_IMPL_RELEASE(SpeechDispatcherService)
SpeechDispatcherService::SpeechDispatcherService()
: mInitialized(false)
, mSpeechdClient(nullptr)
{
}
void
SpeechDispatcherService::Init()
{
if (!Preferences::GetBool("media.webspeech.synth.enabled") ||
Preferences::GetBool("media.webspeech.synth.test")) {
@ -299,7 +304,7 @@ SpeechDispatcherService::SpeechDispatcherService()
getter_AddRefs(mInitThread));
MOZ_ASSERT(NS_SUCCEEDED(rv));
rv = mInitThread->Dispatch(
NS_NewRunnableMethod(this, &SpeechDispatcherService::Init), NS_DISPATCH_NORMAL);
NS_NewRunnableMethod(this, &SpeechDispatcherService::Setup), NS_DISPATCH_NORMAL);
MOZ_ASSERT(NS_SUCCEEDED(rv));
}
@ -315,7 +320,7 @@ SpeechDispatcherService::~SpeechDispatcherService()
}
void
SpeechDispatcherService::Init()
SpeechDispatcherService::Setup()
{
#define FUNC(name, type, params) { #name, (nsSpeechDispatcherFunc *)&_##name },
static const nsSpeechDispatcherDynamicFunction kSpeechDispatcherSymbols[] = {
@ -534,6 +539,7 @@ SpeechDispatcherService::GetInstance(bool create)
if (!sSingleton && create) {
sSingleton = new SpeechDispatcherService();
sSingleton->Init();
}
return sSingleton;

Просмотреть файл

@ -30,8 +30,11 @@ public:
NS_DECL_NSISPEECHSERVICE
SpeechDispatcherService();
void Init();
void Setup();
void EventNotify(uint32_t aMsgId, uint32_t aState);
static SpeechDispatcherService* GetInstance(bool create = true);

Просмотреть файл

@ -66,7 +66,7 @@ uint32_t
TCPServerSocketParent::GetAppId()
{
const PContentParent *content = Manager()->Manager();
if (PBrowserParent* browser = LoneManagedOrNull(content->ManagedPBrowserParent())) {
if (PBrowserParent* browser = SingleManagedOrNull(content->ManagedPBrowserParent())) {
TabParent *tab = TabParent::GetFrom(browser);
return tab->OwnAppId();
} else {
@ -78,7 +78,7 @@ bool
TCPServerSocketParent::GetInBrowser()
{
const PContentParent *content = Manager()->Manager();
if (PBrowserParent* browser = LoneManagedOrNull(content->ManagedPBrowserParent())) {
if (PBrowserParent* browser = SingleManagedOrNull(content->ManagedPBrowserParent())) {
TabParent *tab = TabParent::GetFrom(browser);
return tab->IsBrowserElement();
} else {

Просмотреть файл

@ -68,7 +68,7 @@ uint32_t
TCPSocketParent::GetAppId()
{
const PContentParent *content = Manager()->Manager();
if (PBrowserParent* browser = LoneManagedOrNull(content->ManagedPBrowserParent())) {
if (PBrowserParent* browser = SingleManagedOrNull(content->ManagedPBrowserParent())) {
TabParent *tab = TabParent::GetFrom(browser);
return tab->OwnAppId();
} else {
@ -80,7 +80,7 @@ bool
TCPSocketParent::GetInBrowser()
{
const PContentParent *content = Manager()->Manager();
if (PBrowserParent* browser = LoneManagedOrNull(content->ManagedPBrowserParent())) {
if (PBrowserParent* browser = SingleManagedOrNull(content->ManagedPBrowserParent())) {
TabParent *tab = TabParent::GetFrom(browser);
return tab->IsBrowserElement();
} else {
@ -223,7 +223,9 @@ TCPSocketParent::RecvOpenBind(const nsCString& aRemoteHost,
uint32_t appId = nsIScriptSecurityManager::NO_APP_ID;
bool inBrowser = false;
const PContentParent *content = Manager()->Manager();
if (PBrowserParent* browser = LoneManagedOrNull(content->ManagedPBrowserParent())) {
if (PBrowserParent* browser = SingleManagedOrNull(content->ManagedPBrowserParent())) {
// appId's are for B2G only currently, where managees.Count() == 1
// This is not guaranteed currently in Desktop, so skip this there.
TabParent *tab = TabParent::GetFrom(browser);
appId = tab->OwnAppId();
inBrowser = tab->IsBrowserElement();

Просмотреть файл

@ -1504,7 +1504,7 @@ PluginModuleChromeParent::OnHangUIContinue()
CrashReporterParent*
PluginModuleChromeParent::CrashReporter()
{
return static_cast<CrashReporterParent*>(LoneManagedOrNull(ManagedPCrashReporterParent()));
return static_cast<CrashReporterParent*>(LoneManagedOrNullAsserts(ManagedPCrashReporterParent()));
}
#ifdef MOZ_CRASHREPORTER_INJECTOR

Просмотреть файл

@ -213,8 +213,8 @@ AudioChannelManager::GetAllowedAudioChannels(
}
nsBrowserElement::GenerateAllowedAudioChannels(window, nullptr, nullptr,
manifestURL, aAudioChannels,
aRv);
manifestURL, nullptr,
aAudioChannels, aRv);
NS_WARN_IF(aRv.Failed());
}

Просмотреть файл

@ -1,34 +0,0 @@
/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/.
*/
enum SelectionState {
"drag",
"mousedown",
"mouseup",
"keypress",
"selectall",
"collapsetostart",
"collapsetoend",
"blur",
"updateposition",
"taponcaret"
};
dictionary SelectionStateChangedEventInit : EventInit {
boolean visible = true;
DOMString selectedText = "";
DOMRectReadOnly? boundingClientRect = null;
sequence<SelectionState> states = [];
};
[Constructor(DOMString type, optional SelectionStateChangedEventInit eventInit),
ChromeOnly]
interface SelectionStateChangedEvent : Event {
readonly attribute boolean visible;
readonly attribute DOMString selectedText;
readonly attribute DOMRectReadOnly? boundingClientRect;
[Cached, Pure] readonly attribute sequence<SelectionState> states;
};

Просмотреть файл

@ -815,7 +815,6 @@ GENERATED_EVENTS_WEBIDL_FILES = [
'ProgressEvent.webidl',
'RecordErrorEvent.webidl',
'ScrollViewChangeEvent.webidl',
'SelectionStateChangedEvent.webidl',
'StyleRuleChangeEvent.webidl',
'StyleSheetApplicableStateChangeEvent.webidl',
'StyleSheetChangeEvent.webidl',

Просмотреть файл

@ -1616,6 +1616,17 @@ nsXULElement::GetFrameLoader()
return loader.forget();
}
nsresult
nsXULElement::GetParentApplication(mozIApplication** aApplication)
{
if (!aApplication) {
return NS_ERROR_FAILURE;
}
*aApplication = nullptr;
return NS_OK;
}
nsresult
nsXULElement::SetIsPrerendered()
{

Просмотреть файл

@ -412,6 +412,7 @@ public:
virtual mozilla::EventStates IntrinsicState() const override;
nsresult GetFrameLoader(nsIFrameLoader** aFrameLoader);
nsresult GetParentApplication(mozIApplication** aApplication);
nsresult SetIsPrerendered();
nsresult SwapFrameLoaders(nsIFrameLoaderOwner* aOtherOwner);

Просмотреть файл

@ -59,16 +59,4 @@ RESOURCE_FILES += [
'res/table-remove-row-active.gif',
'res/table-remove-row-hover.gif',
'res/table-remove-row.gif',
'res/text_caret.png',
'res/text_caret@1.5x.png',
'res/text_caret@2.25x.png',
'res/text_caret@2x.png',
'res/text_caret_tilt_left.png',
'res/text_caret_tilt_left@1.5x.png',
'res/text_caret_tilt_left@2.25x.png',
'res/text_caret_tilt_left@2x.png',
'res/text_caret_tilt_right.png',
'res/text_caret_tilt_right@1.5x.png',
'res/text_caret_tilt_right@2.25x.png',
'res/text_caret_tilt_right@2x.png',
]

Просмотреть файл

@ -57,10 +57,10 @@ skip-if(B2G||Mulet) random-if(Android) needs-focus != spellcheck-textarea-attr-d
skip-if(B2G||Mulet) random-if(Android) needs-focus != spellcheck-textarea-attr-dynamic-override-inherit.html spellcheck-textarea-ref.html # Initial mulet triage: parity with B2G/B2G Desktop
skip-if(B2G||Mulet) random-if(Android) needs-focus != spellcheck-textarea-property-dynamic-override.html spellcheck-textarea-ref.html # Initial mulet triage: parity with B2G/B2G Desktop
skip-if(B2G||Mulet) random-if(Android) needs-focus != spellcheck-textarea-property-dynamic-override-inherit.html spellcheck-textarea-ref.html # Initial mulet triage: parity with B2G/B2G Desktop
needs-focus pref(touchcaret.enabled,false) == caret_on_focus.html caret_on_focus-ref.html
needs-focus == caret_on_focus.html caret_on_focus-ref.html
needs-focus != caret_on_textarea_lastline.html caret_on_textarea_lastline-ref.html
needs-focus pref(touchcaret.enabled,false) pref(selectioncaret.enabled,false) == input-text-onfocus-reframe.html input-text-onfocus-reframe-ref.html
needs-focus pref(touchcaret.enabled,false) pref(selectioncaret.enabled,false) == input-text-notheme-onfocus-reframe.html input-text-notheme-onfocus-reframe-ref.html
needs-focus == input-text-onfocus-reframe.html input-text-onfocus-reframe-ref.html
needs-focus == input-text-notheme-onfocus-reframe.html input-text-notheme-onfocus-reframe-ref.html
skip-if(B2G||Mulet) needs-focus == caret_after_reframe.html caret_after_reframe-ref.html # B2G timed out waiting for reftest-wait to be removed # Initial mulet triage: parity with B2G/B2G Desktop
== nobogusnode-1.html nobogusnode-ref.html
== nobogusnode-2.html nobogusnode-ref.html
@ -104,15 +104,15 @@ skip-if(Android||B2G||Mulet) needs-focus == 462758-grabbers-resizers.html 462758
== 388980-1.html 388980-1-ref.html
needs-focus == spellcheck-superscript-1.html spellcheck-superscript-1-ref.html
skip-if(B2G||Mulet) fails-if(Android) needs-focus != spellcheck-superscript-2.html spellcheck-superscript-2-ref.html # bug 783658 # Initial mulet triage: parity with B2G/B2G Desktop
needs-focus pref(selectioncaret.enabled,false) pref(layout.accessiblecaret.enabled,false) == 824080-1.html 824080-1-ref.html
needs-focus pref(selectioncaret.enabled,false) pref(layout.accessiblecaret.enabled,false) == 824080-2.html 824080-2-ref.html
needs-focus pref(selectioncaret.enabled,false) pref(layout.accessiblecaret.enabled,false) == 824080-3.html 824080-3-ref.html
needs-focus pref(layout.accessiblecaret.enabled,false) == 824080-1.html 824080-1-ref.html
needs-focus pref(layout.accessiblecaret.enabled,false) == 824080-2.html 824080-2-ref.html
needs-focus pref(layout.accessiblecaret.enabled,false) == 824080-3.html 824080-3-ref.html
needs-focus != 824080-2.html 824080-3.html
needs-focus pref(selectioncaret.enabled,false) pref(layout.accessiblecaret.enabled,false) == 824080-4.html 824080-4-ref.html
needs-focus pref(selectioncaret.enabled,false) pref(layout.accessiblecaret.enabled,false) == 824080-5.html 824080-5-ref.html
needs-focus pref(layout.accessiblecaret.enabled,false) == 824080-4.html 824080-4-ref.html
needs-focus pref(layout.accessiblecaret.enabled,false) == 824080-5.html 824080-5-ref.html
needs-focus != 824080-4.html 824080-5.html
needs-focus == 824080-6.html 824080-6-ref.html
needs-focus pref(selectioncaret.enabled,false) pref(layout.accessiblecaret.enabled,false) == 824080-7.html 824080-7-ref.html
needs-focus pref(layout.accessiblecaret.enabled,false) == 824080-7.html 824080-7-ref.html
needs-focus != 824080-6.html 824080-7.html
# Bug 674927: copy spellcheck-textarea tests to contenteditable
== spellcheck-contenteditable-attr.html spellcheck-contenteditable-nofocus-ref.html

Просмотреть файл

@ -28,6 +28,8 @@ public:
TextureFlags aFlags = TextureFlags::DEFAULT,
TextureAllocationFlags aAllocFlags = ALLOC_DEFAULT) const override;
virtual bool UpdateFromSurface(gfx::SourceSurface* aSurface) override;
static
DIBTextureData* Create(gfx::IntSize aSize, gfx::SurfaceFormat aFormat);
@ -63,6 +65,8 @@ public:
TextureFlags aFlags = TextureFlags::DEFAULT,
TextureAllocationFlags aAllocFlags = ALLOC_DEFAULT) const override;
virtual bool UpdateFromSurface(gfx::SourceSurface* aSurface) override;
static
DIBTextureData* Create(gfx::IntSize aSize, gfx::SurfaceFormat aFormat,
ISurfaceAllocator* aAllocator);
@ -101,6 +105,10 @@ public:
virtual ~ShmemDIBTextureData()
{
MOZ_COUNT_DTOR(ShmemDIBTextureData);
// The host side has its own references and handles to this data, we can
// safely clear ours.
DeallocateData();
}
HANDLE mFileMapping;
@ -115,31 +123,6 @@ DIBTextureData::BorrowDrawTarget()
return gfxPlatform::GetPlatform()->CreateDrawTargetForSurface(mSurface, mSize);
}
bool
DIBTextureData::UpdateFromSurface(gfx::SourceSurface* aSurface)
{
RefPtr<gfxImageSurface> imgSurf = mSurface->GetAsImageSurface();
RefPtr<DataSourceSurface> srcSurf = aSurface->GetDataSurface();
if (!srcSurf) {
gfxCriticalError() << "Failed to GetDataSurface in UpdateFromSurface.";
return false;
}
DataSourceSurface::MappedSurface sourceMap;
srcSurf->Map(DataSourceSurface::READ, &sourceMap);
for (int y = 0; y < srcSurf->GetSize().height; y++) {
memcpy(imgSurf->Data() + imgSurf->Stride() * y,
sourceMap.mData + sourceMap.mStride * y,
srcSurf->GetSize().width * BytesPerPixel(srcSurf->GetFormat()));
}
srcSurf->Unmap();
return true;
}
DIBTextureData*
DIBTextureData::Create(gfx::IntSize aSize, gfx::SurfaceFormat aFormat,
ISurfaceAllocator* aAllocator)
@ -192,6 +175,34 @@ MemoryDIBTextureData::Create(gfx::IntSize aSize, gfx::SurfaceFormat aFormat)
return new MemoryDIBTextureData(aSize, aFormat, surface);
}
bool
MemoryDIBTextureData::UpdateFromSurface(gfx::SourceSurface* aSurface)
{
RefPtr<gfxImageSurface> imgSurf = mSurface->GetAsImageSurface();
RefPtr<DataSourceSurface> srcSurf = aSurface->GetDataSurface();
if (!srcSurf) {
gfxCriticalError() << "Failed to GetDataSurface in UpdateFromSurface.";
return false;
}
DataSourceSurface::MappedSurface sourceMap;
if (!srcSurf->Map(gfx::DataSourceSurface::READ, &sourceMap)) {
gfxCriticalError() << "Failed to map source surface for UpdateFromSurface.";
return false;
}
for (int y = 0; y < srcSurf->GetSize().height; y++) {
memcpy(imgSurf->Data() + imgSurf->Stride() * y,
sourceMap.mData + sourceMap.mStride * y,
srcSurf->GetSize().width * BytesPerPixel(srcSurf->GetFormat()));
}
srcSurf->Unmap();
return true;
}
TextureData*
ShmemDIBTextureData::CreateSimilar(ISurfaceAllocator* aAllocator,
TextureFlags aFlags,
@ -203,6 +214,46 @@ ShmemDIBTextureData::CreateSimilar(ISurfaceAllocator* aAllocator,
return ShmemDIBTextureData::Create(mSize, mFormat, aAllocator);
}
bool
ShmemDIBTextureData::UpdateFromSurface(gfx::SourceSurface* aSurface)
{
RefPtr<DataSourceSurface> srcSurf = aSurface->GetDataSurface();
if (!srcSurf) {
gfxCriticalError() << "Failed to GetDataSurface in UpdateFromSurface.";
return false;
}
DataSourceSurface::MappedSurface sourceMap;
if (!srcSurf->Map(gfx::DataSourceSurface::READ, &sourceMap)) {
gfxCriticalError() << "Failed to map source surface for UpdateFromSurface.";
return false;
}
GdiFlush();
uint32_t stride = mSize.width * BytesPerPixel(mFormat);
uint8_t* data = (uint8_t*)::MapViewOfFile(mFileMapping, FILE_MAP_WRITE, 0, 0, stride * mSize.height);
if (!data) {
gfxCriticalError() << "Failed to map view of file for UpdateFromSurface.";
srcSurf->Unmap();
return false;
}
for (int y = 0; y < srcSurf->GetSize().height; y++) {
memcpy(data + stride * y,
sourceMap.mData + sourceMap.mStride * y,
srcSurf->GetSize().width * BytesPerPixel(srcSurf->GetFormat()));
}
::UnmapViewOfFile(data);
srcSurf->Unmap();
return true;
}
bool
ShmemDIBTextureData::Serialize(SurfaceDescriptor& aOutDescriptor)
{
@ -229,14 +280,6 @@ ShmemDIBTextureData::Create(gfx::IntSize aSize, gfx::SurfaceFormat aFormat,
return nullptr;
}
uint8_t* data = (uint8_t*)::MapViewOfFile(fileMapping, FILE_MAP_WRITE | FILE_MAP_READ,
0, 0, aSize.width * aSize.height
* BytesPerPixel(aFormat));
memset(data, 0x80, aSize.width * aSize.height * BytesPerPixel(aFormat));
::UnmapViewOfFile(fileMapping);
BITMAPV4HEADER header;
memset(&header, 0, sizeof(BITMAPV4HEADER));
header.bV4Size = sizeof(BITMAPV4HEADER);
@ -249,7 +292,11 @@ ShmemDIBTextureData::Create(gfx::IntSize aSize, gfx::SurfaceFormat aFormat,
header.bV4GreenMask = 0x0000FF00;
header.bV4BlueMask = 0x000000FF;
HDC dc = ::CreateCompatibleDC(::GetDC(NULL));
HDC nulldc = ::GetDC(NULL);
HDC dc = ::CreateCompatibleDC(nulldc);
::ReleaseDC(nullptr, nulldc);
if (!dc) {
::CloseHandle(fileMapping);
@ -264,7 +311,7 @@ ShmemDIBTextureData::Create(gfx::IntSize aSize, gfx::SurfaceFormat aFormat,
if (!bitmap) {
gfxCriticalError() << "Failed to create DIB section for a bitmap of size "
<< aSize;
<< aSize << " and mapSize " << mapSize;
::CloseHandle(fileMapping);
::DeleteDC(dc);
return nullptr;
@ -392,6 +439,14 @@ TextureHostFileMapping::~TextureHostFileMapping()
::CloseHandle(mFileMapping);
}
UserDataKey kFileMappingKey;
static void UnmapFileData(void* aData)
{
MOZ_ASSERT(aData);
::UnmapViewOfFile(aData);
}
void
TextureHostFileMapping::UpdatedInternal(const nsIntRegion* aRegion)
{
@ -410,14 +465,14 @@ TextureHostFileMapping::UpdatedInternal(const nsIntRegion* aRegion)
if (data) {
RefPtr<DataSourceSurface> surf = Factory::CreateWrappingDataSourceSurface(data, mSize.width * BytesPerPixel(mFormat), mSize, mFormat);
surf->AddUserData(&kFileMappingKey, data, UnmapFileData);
if (!mTextureSource->Update(surf, const_cast<nsIntRegion*>(aRegion))) {
mTextureSource = nullptr;
}
} else {
mTextureSource = nullptr;
}
::UnmapViewOfFile(data);
}
}

Просмотреть файл

@ -30,8 +30,6 @@ public:
virtual already_AddRefed<gfx::DrawTarget> BorrowDrawTarget() override;
virtual bool UpdateFromSurface(gfx::SourceSurface* aSurface) override;
virtual bool HasInternalBuffer() const override { return true; }
static

Просмотреть файл

@ -516,8 +516,14 @@ APZCTreeManager::PrepareNodeForLayer(const LayerMetricsWrapper& aLayer,
// Even though different layers associated with a given APZC may be at
// different levels in the layer tree (e.g. one being an uncle of another),
// we require from Layout that the CSS transforms up to their common
// ancestor be the same.
MOZ_ASSERT(aAncestorTransform == apzc->GetAncestorTransform());
// ancestor be roughly the same. There are cases in which the transforms
// are not exactly the same, for example if the parent is container layer
// for an opacity, and this container layer has a resolution-induced scale
// as its base transform and a prescale that is supposed to undo that scale.
// Due to floating point inaccuracies those transforms can end up not quite
// canceling each other. That's why we're using a fuzzy comparison here
// instead of an exact one.
MOZ_ASSERT(aAncestorTransform.FuzzyEqualsMultiplicative(apzc->GetAncestorTransform()));
ParentLayerIntRegion clipRegion = ComputeClipRegion(state->mController, aLayer);
node->SetHitTestData(GetEventRegions(aLayer), aLayer.GetTransform(), Some(clipRegion),

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше