/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#if !defined(GStreamerReader_h_)
#define GStreamerReader_h_

#include <map>

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
// This include trips -Wreserved-user-defined-literal on clang. Ignoring it
// trips -Wpragmas on GCC (unknown warning), but ignoring that trips
// -Wunknown-pragmas on clang (unknown pragma).
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunknown-pragmas"
#pragma GCC diagnostic ignored "-Wpragmas"
#pragma GCC diagnostic ignored "-Wreserved-user-defined-literal"
#include <gst/video/video.h>
#pragma GCC diagnostic pop

#include "MediaDecoderReader.h"
#include "MP3FrameParser.h"
#include "ImageContainer.h"
#include "nsRect.h"

struct GstURIDecodeBin;

namespace mozilla {

namespace dom {
class TimeRanges;
}

class AbstractMediaDecoder;

class GStreamerReader : public MediaDecoderReader
{
  typedef gfx::IntRect IntRect;

public:
  explicit GStreamerReader(AbstractMediaDecoder* aDecoder);
  virtual ~GStreamerReader();

  virtual nsresult Init(MediaDecoderReader* aCloneDonor) MOZ_OVERRIDE;
  virtual nsresult ResetDecode() MOZ_OVERRIDE;
  virtual bool DecodeAudioData() MOZ_OVERRIDE;
  virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
                                int64_t aTimeThreshold) MOZ_OVERRIDE;
  virtual nsresult ReadMetadata(MediaInfo* aInfo,
                                MetadataTags** aTags) MOZ_OVERRIDE;
  virtual nsRefPtr<SeekPromise>
  Seek(int64_t aTime, int64_t aEndTime) MOZ_OVERRIDE;
  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) MOZ_OVERRIDE;

  virtual void NotifyDataArrived(const char *aBuffer,
                                 uint32_t aLength,
                                 int64_t aOffset) MOZ_OVERRIDE;

  virtual bool HasAudio() MOZ_OVERRIDE {
    return mInfo.HasAudio();
  }

  virtual bool HasVideo() MOZ_OVERRIDE {
    return mInfo.HasVideo();
  }

  layers::ImageContainer* GetImageContainer() { return mDecoder->GetImageContainer(); }

  virtual bool IsMediaSeekable() MOZ_OVERRIDE;

private:

  void ReadAndPushData(guint aLength);
  nsRefPtr<layers::PlanarYCbCrImage> GetImageFromBuffer(GstBuffer* aBuffer);
  void CopyIntoImageBuffer(GstBuffer *aBuffer, GstBuffer** aOutBuffer,
                           nsRefPtr<layers::PlanarYCbCrImage> &image);
  GstCaps* BuildAudioSinkCaps();
  void InstallPadCallbacks();

#if GST_VERSION_MAJOR >= 1
  void ImageDataFromVideoFrame(GstVideoFrame *aFrame,
                               layers::PlanarYCbCrImage::Data *aData);
#endif

  /* Called once the pipeline is set up, to check that the stream only
   * contains supported formats.
   */
  nsresult CheckSupportedFormats();

  /* Gst callbacks */

  static GstBusSyncReply ErrorCb(GstBus *aBus, GstMessage *aMessage, gpointer aUserData);
  GstBusSyncReply Error(GstBus *aBus, GstMessage *aMessage);
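
  /* A bus sync handler with ErrorCb's signature is typically installed on the
   * pipeline bus so errors are noticed on the thread that posts them. A
   * minimal sketch, assuming GStreamer 1.x (0.10 has no GDestroyNotify
   * argument); the real wiring lives in the .cpp:
   *
   *   mBus = gst_pipeline_get_bus(GST_PIPELINE(mPlayBin));
   *   gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr);
   *
   * ErrorCb then recovers |this| from the gpointer and forwards to Error().
   */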

  /*
   * We attach this callback to playbin so that when uridecodebin is
   * constructed, we can listen for its autoplug-sort signal to blacklist
   * the elements it can construct.
   */
  static void ElementAddedCb(GstBin *aPlayBin,
                             GstElement *aElement,
                             gpointer aUserData);

  /*
   * Called on the autoplug-sort signal emitted by uridecodebin for filtering
   * the elements it uses.
   */
  static GValueArray *ElementFilterCb(GstURIDecodeBin *aBin,
                                      GstPad *aPad,
                                      GstCaps *aCaps,
                                      GValueArray *aFactories,
                                      gpointer aUserData);

  GValueArray *ElementFilter(GstURIDecodeBin *aBin,
                             GstPad *aPad,
                             GstCaps *aCaps,
                             GValueArray *aFactories);
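
  /* A rough sketch, based on the comments above, of how these callbacks fit
   * together (the exact code lives in the .cpp): ElementAddedCb is connected
   * to playbin's "element-added" signal, and once the added element is the
   * uridecodebin, ElementFilterCb is connected to its "autoplug-sort" signal:
   *
   *   g_signal_connect(mPlayBin, "element-added",
   *                    G_CALLBACK(GStreamerReader::ElementAddedCb), this);
   *   // later, inside ElementAddedCb, when aElement is the uridecodebin:
   *   g_signal_connect(aElement, "autoplug-sort",
   *                    G_CALLBACK(GStreamerReader::ElementFilterCb), this);
   */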

  /* Called on the source-setup signal emitted by playbin. Used to
   * configure appsrc.
   */
  static void PlayBinSourceSetupCb(GstElement* aPlayBin,
                                   GParamSpec* pspec,
                                   gpointer aUserData);
  void PlayBinSourceSetup(GstAppSrc* aSource);

  /* Called from appsrc when we need to read more data from the resource */
  static void NeedDataCb(GstAppSrc* aSrc, guint aLength, gpointer aUserData);
  void NeedData(GstAppSrc* aSrc, guint aLength);

  /* Called when appsrc has enough data and we can stop reading */
  static void EnoughDataCb(GstAppSrc* aSrc, gpointer aUserData);
  void EnoughData(GstAppSrc* aSrc);

  /* Called when a seek is issued on the pipeline */
  static gboolean SeekDataCb(GstAppSrc* aSrc,
                             guint64 aOffset,
                             gpointer aUserData);
  gboolean SeekData(GstAppSrc* aSrc, guint64 aOffset);
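
  /* These three static callbacks map onto the fields of mSrcCallbacks
   * (GstAppSrcCallbacks). A minimal registration sketch, assuming it runs
   * once the appsrc is available (e.g. from PlayBinSourceSetup):
   *
   *   mSrcCallbacks.need_data = GStreamerReader::NeedDataCb;
   *   mSrcCallbacks.enough_data = GStreamerReader::EnoughDataCb;
   *   mSrcCallbacks.seek_data = GStreamerReader::SeekDataCb;
   *   gst_app_src_set_callbacks(mSource, &mSrcCallbacks, this, nullptr);
   */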

  /* Called when events reach the sinks. See inline comments */
#if GST_VERSION_MAJOR == 1
  static GstPadProbeReturn EventProbeCb(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData);
  GstPadProbeReturn EventProbe(GstPad *aPad, GstEvent *aEvent);
#else
  static gboolean EventProbeCb(GstPad* aPad, GstEvent* aEvent, gpointer aUserData);
  gboolean EventProbe(GstPad* aPad, GstEvent* aEvent);
#endif
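
  /* The probe would typically be installed on the sink pads (for instance
   * from InstallPadCallbacks()). A sketch of the version-specific call, where
   * |pad| is an illustrative local GstPad*, not a member:
   *
   *   #if GST_VERSION_MAJOR == 1
   *     gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
   *                       GStreamerReader::EventProbeCb, this, nullptr);
   *   #else
   *     gst_pad_add_event_probe(pad,
   *                             G_CALLBACK(GStreamerReader::EventProbeCb), this);
   *   #endif
   */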

  /* Called when the video part of the pipeline allocates buffers. Used to
   * provide PlanarYCbCrImage backed GstBuffers to the pipeline so that a memory
   * copy can be avoided when handling YUV buffers from the pipeline to the gfx
   * side.
   */
#if GST_VERSION_MAJOR == 1
  static GstPadProbeReturn QueryProbeCb(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData);
  GstPadProbeReturn QueryProbe(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData);
#else
  static GstFlowReturn AllocateVideoBufferCb(GstPad* aPad, guint64 aOffset, guint aSize,
                                             GstCaps* aCaps, GstBuffer** aBuf);
  GstFlowReturn AllocateVideoBufferFull(GstPad* aPad, guint64 aOffset, guint aSize,
                                        GstCaps* aCaps, GstBuffer** aBuf,
                                        nsRefPtr<layers::PlanarYCbCrImage>& aImage);
  GstFlowReturn AllocateVideoBuffer(GstPad* aPad, guint64 aOffset, guint aSize,
                                    GstCaps* aCaps, GstBuffer** aBuf);
#endif

  /* Called when the pipeline is prerolled, that is, when the first audio and
   * video buffers are queued in the sinks at startup or after a seek.
   */
  static GstFlowReturn NewPrerollCb(GstAppSink* aSink, gpointer aUserData);
  void VideoPreroll();
  void AudioPreroll();

  /* Called when buffers reach the sinks */
  static GstFlowReturn NewBufferCb(GstAppSink* aSink, gpointer aUserData);
  void NewVideoBuffer();
  void NewAudioBuffer();

  /* Called at end of stream, when decoding has finished */
  static void EosCb(GstAppSink* aSink, gpointer aUserData);
  /* Notifies that a sink will not receive any more data. If nullptr is
   * passed, we assume all streams have reached EOS (for example because an
   * error has occurred). */
  void Eos(GstAppSink* aSink = nullptr);
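
  /* EosCb, NewPrerollCb and NewBufferCb correspond to the fields of
   * mSinkCallbacks (GstAppSinkCallbacks) and would be registered on both app
   * sinks. A minimal sketch, assuming GStreamer 1.x field names (0.10 uses
   * new_buffer instead of new_sample):
   *
   *   mSinkCallbacks.eos = GStreamerReader::EosCb;
   *   mSinkCallbacks.new_preroll = GStreamerReader::NewPrerollCb;
   *   mSinkCallbacks.new_sample = GStreamerReader::NewBufferCb;
   *   gst_app_sink_set_callbacks(mVideoAppSink, &mSinkCallbacks, this, nullptr);
   *   gst_app_sink_set_callbacks(mAudioAppSink, &mSinkCallbacks, this, nullptr);
   */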

  /* Called when an element is added inside playbin. We use it to find the
   * decodebin instance.
   */
  static void PlayElementAddedCb(GstBin *aBin, GstElement *aElement,
                                 gpointer *aUserData);

  /* Called during decoding, to decide whether a (sub)stream should be decoded
   * or ignored */
  static bool ShouldAutoplugFactory(GstElementFactory* aFactory, GstCaps* aCaps);

  /* Called by decodebin during autoplugging. We use it to apply our
   * container/codec blacklist.
   */
  static GValueArray* AutoplugSortCb(GstElement* aElement,
                                     GstPad* aPad, GstCaps* aCaps,
                                     GValueArray* aFactories);

  // Try to find MP3 headers in this stream using our MP3 frame parser.
  nsresult ParseMP3Headers();

  // Get the length of the stream, excluding any metadata we have ignored at
  // the start of the stream: ID3 headers, for example.
  int64_t GetDataLength();

  // Use our own MP3 parser here, largely for consistency with other platforms.
  MP3FrameParser mMP3FrameParser;

  // The byte position in the stream where the actual media (ignoring, for
  // example, ID3 tags) starts.
  uint64_t mDataOffset;

  // We want to be able to decide in |ReadMetadata| whether or not we use the
  // duration from the MP3 frame parser, as this backend supports more than
  // just MP3. But |NotifyDataArrived| can update the duration and is often
  // called _before_ |ReadMetadata|. This flag stops the former from using the
  // parser duration until we are sure we want to.
  bool mUseParserDuration;
  int64_t mLastParserDuration;
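
  // A hedged sketch of the gating described above (the real logic lives in
  // the .cpp): |NotifyDataArrived| only publishes the parser's duration once
  // |ReadMetadata| has opted in, e.g.
  //
  //   if (mUseParserDuration && mMP3FrameParser.IsMP3()) {
  //     mLastParserDuration = mMP3FrameParser.GetDuration();
  //     // ...then update the decoder's media duration from it.
  //   }
  //
  // The MP3FrameParser method names here are illustrative assumptions.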

#if GST_VERSION_MAJOR >= 1
  GstAllocator *mAllocator;
  GstBufferPool *mBufferPool;
  GstVideoInfo mVideoInfo;
#endif
  GstElement* mPlayBin;
  GstBus* mBus;
  GstAppSrc* mSource;
  /* video sink bin */
  GstElement* mVideoSink;
  /* the actual video app sink */
  GstAppSink* mVideoAppSink;
  /* audio sink bin */
  GstElement* mAudioSink;
  /* the actual audio app sink */
  GstAppSink* mAudioAppSink;
  GstVideoFormat mFormat;
  IntRect mPicture;
  int mVideoSinkBufferCount;
  int mAudioSinkBufferCount;
  GstAppSrcCallbacks mSrcCallbacks;
  GstAppSinkCallbacks mSinkCallbacks;

  /* monitor used to synchronize access to shared state between gstreamer
   * threads and other gecko threads */
  ReentrantMonitor mGstThreadsMonitor;

  /* video and audio segments we use to convert absolute timestamps to [0,
   * stream_duration]. They're set when the pipeline is started or after a
   * seek. Concurrent access guarded with mGstThreadsMonitor.
   */
  GstSegment mVideoSegment;
  GstSegment mAudioSegment;
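
  /* Illustrative use of these segments (a sketch, not the exact .cpp code):
   * converting a buffer's pipeline timestamp into stream time before handing
   * it to the rest of the media framework. |buffer| is an illustrative local
   * GstBuffer*, not a member:
   *
   *   guint64 position = GST_BUFFER_TIMESTAMP(buffer);  // GST_BUFFER_PTS in 1.x
   *   guint64 streamTime =
   *     gst_segment_to_stream_time(&mVideoSegment, GST_FORMAT_TIME, position);
   */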

  /* bools used to signal when gst has detected the end of stream and
   * DecodeAudioData and DecodeVideoFrame should not expect any more data
   */
  bool mReachedAudioEos;
  bool mReachedVideoEos;
#if GST_VERSION_MAJOR >= 1
  bool mConfigureAlignment;
#endif
  int fpsNum;
  int fpsDen;
};

} // namespace mozilla

#endif