diff --git a/config/autoconf.mk.in b/config/autoconf.mk.in index 0501257c1f8..7930a5ee720 100644 --- a/config/autoconf.mk.in +++ b/config/autoconf.mk.in @@ -171,6 +171,7 @@ MOZ_VORBIS = @MOZ_VORBIS@ MOZ_TREMOR = @MOZ_TREMOR@ MOZ_NO_THEORA_ASM = @MOZ_NO_THEORA_ASM@ MOZ_WEBM = @MOZ_WEBM@ +MOZ_GSTREAMER = @MOZ_GSTREAMER@ MOZ_VP8_ERROR_CONCEALMENT = @MOZ_VP8_ERROR_CONCEALMENT@ MOZ_VP8_ENCODER = @MOZ_VP8_ENCODER@ VPX_AS = @VPX_AS@ @@ -662,6 +663,9 @@ MOZ_ENABLE_LIBCONIC = @MOZ_ENABLE_LIBCONIC@ LIBCONIC_CFLAGS = @LIBCONIC_CFLAGS@ LIBCONIC_LIBS = @LIBCONIC_LIBS@ +GSTREAMER_CFLAGS = @GSTREAMER_CFLAGS@ +GSTREAMER_LIBS = @GSTREAMER_LIBS@ + MACOS_SDK_DIR = @MACOS_SDK_DIR@ NEXT_ROOT = @NEXT_ROOT@ GCC_VERSION = @GCC_VERSION@ diff --git a/config/system-headers b/config/system-headers index c0f022162ef..1486194c042 100644 --- a/config/system-headers +++ b/config/system-headers @@ -1055,3 +1055,7 @@ ogg/os_types.h nestegg/nestegg.h cubeb/cubeb.h #endif +gst/gst.h +gst/app/gstappsink.h +gst/app/gstappsrc.h +gst/video/video.h diff --git a/configure.in b/configure.in index 2584cb1c239..15161e1e065 100644 --- a/configure.in +++ b/configure.in @@ -5794,6 +5794,49 @@ linux*) esac fi +dnl ======================================================== +dnl = Enable GStreamer +dnl ======================================================== +MOZ_ARG_ENABLE_BOOL(gstreamer, +[ --enable-gstreamer Enable GStreamer support], +MOZ_GSTREAMER=1, +MOZ_GSTREAMER=) + +if test "$MOZ_GSTREAMER"; then + # API version, eg 0.10, 1.0 etc + GST_API_VERSION=0.10 + # core/base release number + # depend on >= 0.10.33 as that's when the playbin2 source-setup signal was + # introduced + GST_VERSION=0.10.33 + PKG_CHECK_MODULES(GSTREAMER, + gstreamer-$GST_API_VERSION >= $GST_VERSION + gstreamer-app-$GST_API_VERSION + gstreamer-plugins-base-$GST_API_VERSION) + if test -n "$GSTREAMER_LIBS"; then + _SAVE_LDFLAGS=$LDFLAGS + LDFLAGS="$LDFLAGS -lgstvideo-$GST_API_VERSION" + AC_TRY_LINK(,[return 0;],_HAVE_LIBGSTVIDEO=1,_HAVE_LIBGSTVIDEO=) + if test -n "$_HAVE_LIBGSTVIDEO" ; then + GSTREAMER_LIBS="$GSTREAMER_LIBS -lgstvideo-$GST_API_VERSION" + else + AC_MSG_ERROR([gstreamer video backend requires libgstvideo]) + fi + LDFLAGS=$_SAVE_LDFLAGS + else + AC_MSG_ERROR([gstreamer backend requires the gstreamer packages]) + fi +fi +AC_SUBST(GSTREAMER_CFLAGS) +AC_SUBST(GSTREAMER_LIBS) +AC_SUBST(MOZ_GSTREAMER) + +if test -n "$MOZ_GSTREAMER"; then + AC_DEFINE(MOZ_GSTREAMER) + MOZ_MEDIA=1 +fi + + dnl ======================================================== dnl Permissions System dnl ======================================================== diff --git a/content/html/content/public/nsHTMLMediaElement.h b/content/html/content/public/nsHTMLMediaElement.h index bf3d3a84ef4..33ef53536cc 100644 --- a/content/html/content/public/nsHTMLMediaElement.h +++ b/content/html/content/public/nsHTMLMediaElement.h @@ -302,6 +302,13 @@ public: static char const *const gWebMCodecs[4]; #endif +#ifdef MOZ_GSTREAMER + static bool IsH264Enabled(); + static bool IsH264Type(const nsACString& aType); + static const char gH264Types[3][17]; + static char const *const gH264Codecs[6]; +#endif + /** * Called when a child source element is added to this media element. This * may queue a task to run the select resource algorithm if appropriate. 
diff --git a/content/html/content/src/nsHTMLMediaElement.cpp b/content/html/content/src/nsHTMLMediaElement.cpp
index 7a7e1e9acce..4cf4f207d51 100644
--- a/content/html/content/src/nsHTMLMediaElement.cpp
+++ b/content/html/content/src/nsHTMLMediaElement.cpp
@@ -106,6 +106,9 @@
 #ifdef MOZ_RAW
 #include "nsRawDecoder.h"
 #endif
+#ifdef MOZ_GSTREAMER
+#include "nsGStreamerDecoder.h"
+#endif
 
 #ifdef PR_LOGGING
 static PRLogModuleInfo* gMediaElementLog;
@@ -1846,6 +1849,45 @@ nsHTMLMediaElement::IsWebMType(const nsACString& aType)
 }
 #endif
 
+#ifdef MOZ_GSTREAMER
+const char nsHTMLMediaElement::gH264Types[3][17] = {
+  "video/mp4",
+  "video/3gpp",
+  "video/quicktime",
+};
+
+char const *const nsHTMLMediaElement::gH264Codecs[6] = {
+  "avc1.42E01E",
+  "avc1.58A01E",
+  "avc1.4D401E",
+  "avc1.64001E",
+  "mp4a.40.2",
+  nsnull
+};
+
+bool
+nsHTMLMediaElement::IsH264Enabled()
+{
+  return Preferences::GetBool("media.h264.enabled");
+}
+
+bool
+nsHTMLMediaElement::IsH264Type(const nsACString& aType)
+{
+  if (!IsH264Enabled()) {
+    return false;
+  }
+
+  for (PRUint32 i = 0; i < ArrayLength(gH264Types); ++i) {
+    if (aType.EqualsASCII(gH264Types[i])) {
+      return true;
+    }
+  }
+
+  return false;
+}
+#endif
+
 /* static */
 nsHTMLMediaElement::CanPlayStatus
 nsHTMLMediaElement::CanHandleMediaType(const char* aMIMEType,
@@ -1875,6 +1917,13 @@ nsHTMLMediaElement::CanHandleMediaType(const char* aMIMEType,
     return CANPLAY_YES;
   }
 #endif
+
+#ifdef MOZ_GSTREAMER
+  if (IsH264Type(nsDependentCString(aMIMEType))) {
+    *aCodecList = gH264Codecs;
+    return CANPLAY_YES;
+  }
+#endif
   return CANPLAY_NO;
 }
 
@@ -1987,7 +2036,11 @@ nsHTMLMediaElement::CreateDecoder(const nsACString& aType)
 #endif
 #ifdef MOZ_OGG
   if (IsOggType(aType)) {
+#ifdef MOZ_GSTREAMER
+    nsRefPtr<nsGStreamerDecoder> decoder = new nsGStreamerDecoder();
+#else
     nsRefPtr<nsOggDecoder> decoder = new nsOggDecoder();
+#endif
     if (decoder->Init(this)) {
       return decoder.forget();
     }
@@ -2003,7 +2056,20 @@ nsHTMLMediaElement::CreateDecoder(const nsACString& aType)
 #endif
 #ifdef MOZ_WEBM
   if (IsWebMType(aType)) {
+#ifdef MOZ_GSTREAMER
+    nsRefPtr<nsGStreamerDecoder> decoder = new nsGStreamerDecoder();
+#else
     nsRefPtr<nsWebMDecoder> decoder = new nsWebMDecoder();
+#endif
+    if (decoder->Init(this)) {
+      return decoder.forget();
+    }
+  }
+#endif
+
+#ifdef MOZ_GSTREAMER
+  if (IsH264Type(aType)) {
+    nsRefPtr<nsGStreamerDecoder> decoder = new nsGStreamerDecoder();
     if (decoder->Init(this)) {
       return decoder.forget();
     }
   }
diff --git a/content/media/Makefile.in b/content/media/Makefile.in
index 0eb9495dbcf..e670698df2d 100644
--- a/content/media/Makefile.in
+++ b/content/media/Makefile.in
@@ -96,6 +96,10 @@ ifdef MOZ_WEBM
 PARALLEL_DIRS += webm
 endif
 
+ifdef MOZ_GSTREAMER
+PARALLEL_DIRS += gstreamer
+endif
+
 ifdef ENABLE_TESTS
 PARALLEL_DIRS += test
 endif
diff --git a/content/media/gstreamer/Makefile.in b/content/media/gstreamer/Makefile.in
new file mode 100644
index 00000000000..3e3b217bb73
--- /dev/null
+++ b/content/media/gstreamer/Makefile.in
@@ -0,0 +1,38 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+DEPTH = ../../..
+topsrcdir = @top_srcdir@ +srcdir = @srcdir@ +VPATH = @srcdir@ + +include $(DEPTH)/config/autoconf.mk + +MODULE = content +LIBRARY_NAME = gkcongstreamer_s +LIBXUL_LIBRARY = 1 + + +EXPORTS += \ + nsGStreamerDecoder.h \ + $(NULL) + +CPPSRCS = \ + nsGStreamerReader.cpp \ + nsGStreamerDecoder.cpp \ + $(NULL) + +FORCE_STATIC_LIB = 1 + +include $(topsrcdir)/config/rules.mk + +CFLAGS += $(GSTREAMER_CFLAGS) +CXXFLAGS += $(GSTREAMER_CFLAGS) + + +INCLUDES += \ + -I$(srcdir)/../../base/src \ + -I$(srcdir)/../../html/content/src \ + $(NULL) + diff --git a/content/media/gstreamer/nsGStreamerDecoder.cpp b/content/media/gstreamer/nsGStreamerDecoder.cpp new file mode 100644 index 00000000000..d0455c345a5 --- /dev/null +++ b/content/media/gstreamer/nsGStreamerDecoder.cpp @@ -0,0 +1,14 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "nsBuiltinDecoderStateMachine.h" +#include "nsGStreamerReader.h" +#include "nsGStreamerDecoder.h" + +nsDecoderStateMachine* nsGStreamerDecoder::CreateStateMachine() +{ + return new nsBuiltinDecoderStateMachine(this, new nsGStreamerReader(this)); +} diff --git a/content/media/gstreamer/nsGStreamerDecoder.h b/content/media/gstreamer/nsGStreamerDecoder.h new file mode 100644 index 00000000000..69a38f798e4 --- /dev/null +++ b/content/media/gstreamer/nsGStreamerDecoder.h @@ -0,0 +1,19 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#if !defined(nsGStreamerDecoder_h_) +#define nsGStreamerDecoder_h_ + +#include "nsBuiltinDecoder.h" + +class nsGStreamerDecoder : public nsBuiltinDecoder +{ +public: + virtual nsMediaDecoder* Clone() { return new nsGStreamerDecoder(); } + virtual nsDecoderStateMachine* CreateStateMachine(); +}; + +#endif diff --git a/content/media/gstreamer/nsGStreamerReader.cpp b/content/media/gstreamer/nsGStreamerReader.cpp new file mode 100644 index 00000000000..17374a31c1d --- /dev/null +++ b/content/media/gstreamer/nsGStreamerReader.cpp @@ -0,0 +1,815 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "nsError.h" +#include "nsBuiltinDecoderStateMachine.h" +#include "nsBuiltinDecoder.h" +#include "MediaResource.h" +#include "nsGStreamerReader.h" +#include "VideoUtils.h" +#include "nsTimeRanges.h" +#include "mozilla/Preferences.h" + +using namespace mozilla; +using namespace mozilla::layers; + +// Un-comment to enable logging of seek bisections. 
+//#define SEEK_LOGGING + +#ifdef PR_LOGGING +extern PRLogModuleInfo* gBuiltinDecoderLog; +#define LOG(type, msg) PR_LOG(gBuiltinDecoderLog, type, msg) +#else +#define LOG(type, msg) +#endif + +static const int MAX_CHANNELS = 4; +// Let the demuxer work in pull mode for short files +static const int SHORT_FILE_SIZE = 1024 * 1024; +// The default resource->Read() size when working in push mode +static const int DEFAULT_SOURCE_READ_SIZE = 50 * 1024; + +typedef enum { + GST_PLAY_FLAG_VIDEO = (1 << 0), + GST_PLAY_FLAG_AUDIO = (1 << 1), + GST_PLAY_FLAG_TEXT = (1 << 2), + GST_PLAY_FLAG_VIS = (1 << 3), + GST_PLAY_FLAG_SOFT_VOLUME = (1 << 4), + GST_PLAY_FLAG_NATIVE_AUDIO = (1 << 5), + GST_PLAY_FLAG_NATIVE_VIDEO = (1 << 6), + GST_PLAY_FLAG_DOWNLOAD = (1 << 7), + GST_PLAY_FLAG_BUFFERING = (1 << 8), + GST_PLAY_FLAG_DEINTERLACE = (1 << 9), + GST_PLAY_FLAG_SOFT_COLORBALANCE = (1 << 10) +} PlayFlags; + +nsGStreamerReader::nsGStreamerReader(nsBuiltinDecoder* aDecoder) + : nsBuiltinDecoderReader(aDecoder), + mPlayBin(NULL), + mBus(NULL), + mSource(NULL), + mVideoSink(NULL), + mVideoAppSink(NULL), + mAudioSink(NULL), + mAudioAppSink(NULL), + mFormat(GST_VIDEO_FORMAT_UNKNOWN), + mVideoSinkBufferCount(0), + mAudioSinkBufferCount(0), + mGstThreadsMonitor("media.gst.threads"), + mReachedEos(false), + mByteOffset(0), + mLastReportedByteOffset(0), + fpsNum(0), + fpsDen(0) +{ + MOZ_COUNT_CTOR(nsGStreamerReader); + + mSrcCallbacks.need_data = nsGStreamerReader::NeedDataCb; + mSrcCallbacks.enough_data = nsGStreamerReader::EnoughDataCb; + mSrcCallbacks.seek_data = nsGStreamerReader::SeekDataCb; + + mSinkCallbacks.eos = nsGStreamerReader::EosCb; + mSinkCallbacks.new_preroll = nsGStreamerReader::NewPrerollCb; + mSinkCallbacks.new_buffer = nsGStreamerReader::NewBufferCb; + mSinkCallbacks.new_buffer_list = NULL; + + gst_segment_init(&mVideoSegment, GST_FORMAT_UNDEFINED); + gst_segment_init(&mAudioSegment, GST_FORMAT_UNDEFINED); +} + +nsGStreamerReader::~nsGStreamerReader() +{ + MOZ_COUNT_DTOR(nsGStreamerReader); + ResetDecode(); + + if (mPlayBin) { + gst_app_src_end_of_stream(mSource); + gst_element_set_state(mPlayBin, GST_STATE_NULL); + gst_object_unref(mPlayBin); + mPlayBin = NULL; + mVideoSink = NULL; + mVideoAppSink = NULL; + mAudioSink = NULL; + mAudioAppSink = NULL; + gst_object_unref(mBus); + mBus = NULL; + } +} + +nsresult nsGStreamerReader::Init(nsBuiltinDecoderReader* aCloneDonor) +{ + GError *error = NULL; + if (!gst_init_check(0, 0, &error)) { + LOG(PR_LOG_ERROR, ("gst initialization failed: %s", error->message)); + g_error_free(error); + return NS_ERROR_FAILURE; + } + + mPlayBin = gst_element_factory_make("playbin2", NULL); + if (mPlayBin == NULL) { + LOG(PR_LOG_ERROR, ("couldn't create playbin2")); + return NS_ERROR_FAILURE; + } + g_object_set(mPlayBin, "buffer-size", 0, NULL); + mBus = gst_pipeline_get_bus(GST_PIPELINE(mPlayBin)); + + mVideoSink = gst_parse_bin_from_description("capsfilter name=filter ! " + "appsink name=videosink sync=true max-buffers=1 " + "caps=video/x-raw-yuv,format=(fourcc)I420" + , TRUE, NULL); + mVideoAppSink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(mVideoSink), + "videosink")); + gst_app_sink_set_callbacks(mVideoAppSink, &mSinkCallbacks, + (gpointer) this, NULL); + GstPad *sinkpad = gst_element_get_pad(GST_ELEMENT(mVideoAppSink), "sink"); + gst_pad_add_event_probe(sinkpad, + G_CALLBACK(&nsGStreamerReader::EventProbeCb), this); + gst_object_unref(sinkpad); + + mAudioSink = gst_parse_bin_from_description("capsfilter name=filter ! 
" + "appsink name=audiosink sync=true caps=audio/x-raw-float," + "channels={1,2},rate=44100,width=32,endianness=1234", TRUE, NULL); + mAudioAppSink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(mAudioSink), + "audiosink")); + gst_app_sink_set_callbacks(mAudioAppSink, &mSinkCallbacks, + (gpointer) this, NULL); + sinkpad = gst_element_get_pad(GST_ELEMENT(mAudioAppSink), "sink"); + gst_pad_add_event_probe(sinkpad, + G_CALLBACK(&nsGStreamerReader::EventProbeCb), this); + gst_object_unref(sinkpad); + + g_object_set(mPlayBin, "uri", "appsrc://", + "video-sink", mVideoSink, + "audio-sink", mAudioSink, + NULL); + + g_object_connect(mPlayBin, "signal::source-setup", + nsGStreamerReader::PlayBinSourceSetupCb, this, NULL); + + return NS_OK; +} + +void nsGStreamerReader::PlayBinSourceSetupCb(GstElement *aPlayBin, + GstElement *aSource, + gpointer aUserData) +{ + nsGStreamerReader *reader = reinterpret_cast(aUserData); + reader->PlayBinSourceSetup(GST_APP_SRC(aSource)); +} + +void nsGStreamerReader::PlayBinSourceSetup(GstAppSrc *aSource) +{ + mSource = GST_APP_SRC(aSource); + gst_app_src_set_callbacks(mSource, &mSrcCallbacks, (gpointer) this, NULL); + MediaResource* resource = mDecoder->GetResource(); + PRInt64 len = resource->GetLength(); + gst_app_src_set_size(mSource, len); + if (resource->IsDataCachedToEndOfResource(0) || + (len != -1 && len <= SHORT_FILE_SIZE)) { + /* let the demuxer work in pull mode for local files (or very short files) + * so that we get optimal seeking accuracy/performance + */ + LOG(PR_LOG_ERROR, ("configuring random access")); + gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_RANDOM_ACCESS); + } else { + /* make the demuxer work in push mode so that seeking is kept to a minimum + */ + gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_SEEKABLE); + } +} + +nsresult nsGStreamerReader::ReadMetadata(nsVideoInfo* aInfo) +{ + NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread."); + nsresult ret = NS_OK; + + /* We do 3 attempts here: decoding audio and video, decoding video only, + * decoding audio only. This allows us to play streams that have one broken + * stream but that are otherwise decodeable. + */ + guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO, + ~GST_PLAY_FLAG_AUDIO, ~GST_PLAY_FLAG_VIDEO}; + guint default_flags, current_flags; + g_object_get(mPlayBin, "flags", &default_flags, NULL); + + GstMessage *message = NULL; + for (int i=0; i < G_N_ELEMENTS(flags); i++) { + current_flags = default_flags & flags[i]; + g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, NULL); + + /* reset filter caps to ANY */ + GstCaps *caps = gst_caps_new_any(); + GstElement *filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter"); + g_object_set(filter, "caps", caps, NULL); + gst_object_unref(filter); + + filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter"); + g_object_set(filter, "caps", caps, NULL); + gst_object_unref(filter); + gst_caps_unref(caps); + filter = NULL; + + if (!(current_flags & GST_PLAY_FLAG_AUDIO)) + filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter"); + else if (!(current_flags & GST_PLAY_FLAG_VIDEO)) + filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter"); + + if (filter) { + /* Little trick: set the target caps to "skip" so that playbin2 fails to + * find a decoder for the stream we want to skip. 
+ */ + GstCaps *filterCaps = gst_caps_new_simple ("skip", NULL); + g_object_set(filter, "caps", filterCaps, NULL); + gst_caps_unref(filterCaps); + gst_object_unref(filter); + } + + /* start the pipeline */ + gst_element_set_state(mPlayBin, GST_STATE_PAUSED); + + /* Wait for ASYNC_DONE, which is emitted when the pipeline is built, + * prerolled and ready to play. Also watch for errors. + */ + message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE, + (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR)); + if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) { + GError *error; + gchar *debug; + + gst_message_parse_error(message, &error, &debug); + LOG(PR_LOG_ERROR, ("read metadata error: %s: %s", error->message, + debug)); + g_error_free(error); + g_free(debug); + gst_element_set_state(mPlayBin, GST_STATE_NULL); + gst_message_unref(message); + ret = NS_ERROR_FAILURE; + } else { + gst_message_unref(message); + ret = NS_OK; + break; + } + } + + if (NS_FAILED(ret)) + /* we couldn't get this to play */ + return ret; + + /* FIXME: workaround for a bug in matroskademux. This seek makes matroskademux + * parse the index */ + if (gst_element_seek_simple(mPlayBin, GST_FORMAT_TIME, + GST_SEEK_FLAG_FLUSH, 0)) { + /* after a seek we need to wait again for ASYNC_DONE */ + message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE, + (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR)); + if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) { + gst_element_set_state(mPlayBin, GST_STATE_NULL); + gst_message_unref(message); + return NS_ERROR_FAILURE; + } + } + + /* report the duration */ + gint64 duration; + GstFormat format = GST_FORMAT_TIME; + if (gst_element_query_duration(GST_ELEMENT(mPlayBin), + &format, &duration) && format == GST_FORMAT_TIME) { + ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor()); + LOG(PR_LOG_DEBUG, ("returning duration %"GST_TIME_FORMAT, + GST_TIME_ARGS (duration))); + duration = GST_TIME_AS_USECONDS (duration); + mDecoder->GetStateMachine()->SetDuration(duration); + } + + int n_video = 0, n_audio = 0; + g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, NULL); + mInfo.mHasVideo = n_video != 0; + mInfo.mHasAudio = n_audio != 0; + + *aInfo = mInfo; + + /* set the pipeline to PLAYING so that it starts decoding and queueing data in + * the appsinks */ + gst_element_set_state(mPlayBin, GST_STATE_PLAYING); + + return NS_OK; +} + +nsresult nsGStreamerReader::ResetDecode() +{ + nsresult res = NS_OK; + + if (NS_FAILED(nsBuiltinDecoderReader::ResetDecode())) { + res = NS_ERROR_FAILURE; + } + + mVideoQueue.Reset(); + mAudioQueue.Reset(); + + mVideoSinkBufferCount = 0; + mAudioSinkBufferCount = 0; + mReachedEos = false; + mLastReportedByteOffset = 0; + mByteOffset = 0; + + return res; +} + +void nsGStreamerReader::NotifyBytesConsumed() +{ + NS_ASSERTION(mByteOffset >= mLastReportedByteOffset, + "current byte offset less than prev offset"); + mDecoder->NotifyBytesConsumed(mByteOffset - mLastReportedByteOffset); + mLastReportedByteOffset = mByteOffset; +} + +bool nsGStreamerReader::WaitForDecodedData(int *aCounter) +{ + ReentrantMonitorAutoEnter mon(mGstThreadsMonitor); + + /* Report consumed bytes from here as we can't do it from gst threads */ + NotifyBytesConsumed(); + while(*aCounter == 0) { + if (mReachedEos) { + return false; + } + mon.Wait(); + NotifyBytesConsumed(); + } + (*aCounter)--; + + return true; +} + +bool nsGStreamerReader::DecodeAudioData() +{ + NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread."); + + if 
(!WaitForDecodedData(&mAudioSinkBufferCount)) {
+    mAudioQueue.Finish();
+    return false;
+  }
+
+  GstBuffer *buffer = gst_app_sink_pull_buffer(mAudioAppSink);
+  PRInt64 timestamp = GST_BUFFER_TIMESTAMP(buffer);
+  timestamp = gst_segment_to_stream_time(&mAudioSegment,
+      GST_FORMAT_TIME, timestamp);
+  timestamp = GST_TIME_AS_USECONDS(timestamp);
+  PRInt64 duration = 0;
+  if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
+    duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
+
+  PRInt64 offset = GST_BUFFER_OFFSET(buffer);
+  unsigned int size = GST_BUFFER_SIZE(buffer);
+  PRInt32 frames = (size / sizeof(AudioDataValue)) / mInfo.mAudioChannels;
+  ssize_t outSize = static_cast<ssize_t>(size / sizeof(AudioDataValue));
+  nsAutoArrayPtr<AudioDataValue> data(new AudioDataValue[outSize]);
+  memcpy(data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
+  AudioData *audio = new AudioData(offset, timestamp, duration,
+      frames, data.forget(), mInfo.mAudioChannels);
+
+  mAudioQueue.Push(audio);
+  gst_buffer_unref(buffer);
+
+  if (mAudioQueue.GetSize() < 2) {
+    nsCOMPtr<nsIRunnable> event =
+      NS_NewRunnableMethod(mDecoder, &nsBuiltinDecoder::NextFrameAvailable);
+    NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
+  }
+
+  return true;
+}
+
+bool nsGStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
+                                         PRInt64 aTimeThreshold)
+{
+  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
+
+  GstBuffer *buffer = NULL;
+  PRInt64 timestamp, nextTimestamp;
+  while (true)
+  {
+    if (!WaitForDecodedData(&mVideoSinkBufferCount)) {
+      mVideoQueue.Finish();
+      break;
+    }
+    mDecoder->GetFrameStatistics().NotifyDecodedFrames(0, 1);
+
+    buffer = gst_app_sink_pull_buffer(mVideoAppSink);
+    bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DISCONT);
+    if ((aKeyFrameSkip && !isKeyframe)) {
+      gst_buffer_unref(buffer);
+      buffer = NULL;
+      continue;
+    }
+
+    timestamp = GST_BUFFER_TIMESTAMP(buffer);
+    {
+      ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
+      timestamp = gst_segment_to_stream_time(&mVideoSegment,
+          GST_FORMAT_TIME, timestamp);
+    }
+    NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp),
+                 "frame has invalid timestamp");
+    timestamp = nextTimestamp = GST_TIME_AS_USECONDS(timestamp);
+    if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
+      nextTimestamp += GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
+    else if (fpsNum && fpsDen)
+      /* add 1-frame duration */
+      nextTimestamp += gst_util_uint64_scale(GST_USECOND, fpsNum, fpsDen);
+
+    if (timestamp < aTimeThreshold) {
+      LOG(PR_LOG_DEBUG, ("skipping frame %"GST_TIME_FORMAT
+                         " threshold %"GST_TIME_FORMAT,
+                         GST_TIME_ARGS(timestamp), GST_TIME_ARGS(aTimeThreshold)));
+      gst_buffer_unref(buffer);
+      buffer = NULL;
+      continue;
+    }
+
+    break;
+  }
+
+  if (buffer == NULL)
+    /* no more frames */
+    return false;
+
+  guint8 *data = GST_BUFFER_DATA(buffer);
+
+  int width = mPicture.width;
+  int height = mPicture.height;
+  GstVideoFormat format = mFormat;
+
+  VideoData::YCbCrBuffer b;
+  for(int i = 0; i < 3; i++) {
+    b.mPlanes[i].mData = data + gst_video_format_get_component_offset(format, i,
+        width, height);
+    b.mPlanes[i].mStride = gst_video_format_get_row_stride(format, i, width);
+    b.mPlanes[i].mHeight = gst_video_format_get_component_height(format,
+        i, height);
+    b.mPlanes[i].mWidth = gst_video_format_get_component_width(format,
+        i, width);
+  }
+
+  bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer,
+                                            GST_BUFFER_FLAG_DELTA_UNIT);
+  /* XXX ? */
+  PRInt64 offset = 0;
+  VideoData *video = VideoData::Create(mInfo,
+                                       mDecoder->GetImageContainer(),
+                                       offset,
+                                       timestamp,
+                                       nextTimestamp,
+                                       b,
+                                       isKeyframe,
+                                       -1,
+                                       mPicture);
+  mVideoQueue.Push(video);
+  gst_buffer_unref(buffer);
+
+  if (mVideoQueue.GetSize() < 2) {
+    nsCOMPtr<nsIRunnable> event =
+      NS_NewRunnableMethod(mDecoder, &nsBuiltinDecoder::NextFrameAvailable);
+    NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
+  }
+
+  return true;
+}
+
+nsresult nsGStreamerReader::Seek(PRInt64 aTarget,
+                                 PRInt64 aStartTime,
+                                 PRInt64 aEndTime,
+                                 PRInt64 aCurrentTime)
+{
+  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
+
+  gint64 seekPos = aTarget * GST_USECOND;
+  LOG(PR_LOG_DEBUG, ("%p About to seek to %"GST_TIME_FORMAT,
+                     mDecoder, GST_TIME_ARGS(seekPos)));
+
+  if (!gst_element_seek_simple(mPlayBin, GST_FORMAT_TIME,
+      static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE), seekPos)) {
+    LOG(PR_LOG_ERROR, ("seek failed"));
+    return NS_ERROR_FAILURE;
+  }
+  LOG(PR_LOG_DEBUG, ("seek succeeded"));
+
+  return DecodeToTarget(aTarget);
+}
+
+nsresult nsGStreamerReader::GetBuffered(nsTimeRanges* aBuffered,
+                                        PRInt64 aStartTime)
+{
+  GstFormat format = GST_FORMAT_TIME;
+  MediaResource* resource = mDecoder->GetResource();
+  gint64 resourceLength = resource->GetLength();
+  nsTArray<MediaByteRange> ranges;
+  resource->GetCachedRanges(ranges);
+
+  if (mDecoder->OnStateMachineThread())
+    /* Report the position from here while buffering as we can't report it from
+     * the gstreamer threads that are actually reading from the resource
+     */
+    NotifyBytesConsumed();
+
+  if (resource->IsDataCachedToEndOfResource(0)) {
+    /* fast path for local or completely cached files */
+    gint64 duration = 0;
+    GstFormat format = GST_FORMAT_TIME;
+
+    duration = QueryDuration();
+    double end = (double) duration / GST_MSECOND;
+    LOG(PR_LOG_DEBUG, ("complete range [0, %f] for [0, %li]",
+                       end, resourceLength));
+    aBuffered->Add(0, end);
+    return NS_OK;
+  }
+
+  for(PRUint32 index = 0; index < ranges.Length(); index++) {
+    PRInt64 startOffset = ranges[index].mStart;
+    PRInt64 endOffset = ranges[index].mEnd;
+    gint64 startTime, endTime;
+
+    if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES,
+        startOffset, &format, &startTime) || format != GST_FORMAT_TIME)
+      continue;
+    if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES,
+        endOffset, &format, &endTime) || format != GST_FORMAT_TIME)
+      continue;
+
+    double start = (double) GST_TIME_AS_USECONDS (startTime) / GST_MSECOND;
+    double end = (double) GST_TIME_AS_USECONDS (endTime) / GST_MSECOND;
+    LOG(PR_LOG_DEBUG, ("adding range [%f, %f] for [%li %li] size %li",
+                       start, end, startOffset, endOffset, resourceLength));
+    aBuffered->Add(start, end);
+  }
+
+  return NS_OK;
+}
+
+void nsGStreamerReader::ReadAndPushData(guint aLength)
+{
+  MediaResource* resource = mDecoder->GetResource();
+  NS_ASSERTION(resource, "Decoder has no media resource");
+  nsresult rv = NS_OK;
+
+  GstBuffer *buffer = gst_buffer_new_and_alloc(aLength);
+  guint8 *data = GST_BUFFER_DATA(buffer);
+  PRUint32 size = 0, bytesRead = 0;
+  while(bytesRead < aLength) {
+    rv = resource->Read(reinterpret_cast<char*>(data + bytesRead),
+        aLength - bytesRead, &size);
+    if (NS_FAILED(rv) || size == 0)
+      break;
+
+    bytesRead += size;
+  }
+
+  GST_BUFFER_SIZE(buffer) = bytesRead;
+  mByteOffset += bytesRead;
+
+  GstFlowReturn ret = gst_app_src_push_buffer(mSource, gst_buffer_ref(buffer));
+  if (ret != GST_FLOW_OK)
+    LOG(PR_LOG_ERROR, ("ReadAndPushData push ret %s", gst_flow_get_name(ret)));
+
+ 
if (GST_BUFFER_SIZE (buffer) < aLength) + /* If we read less than what we wanted, we reached the end */ + gst_app_src_end_of_stream(mSource); + + gst_buffer_unref(buffer); +} + +PRInt64 nsGStreamerReader::QueryDuration() +{ + gint64 duration = 0; + GstFormat format = GST_FORMAT_TIME; + + if (gst_element_query_duration(GST_ELEMENT(mPlayBin), + &format, &duration)) { + if (format == GST_FORMAT_TIME) { + LOG(PR_LOG_DEBUG, ("pipeline duration %"GST_TIME_FORMAT, + GST_TIME_ARGS (duration))); + duration = GST_TIME_AS_USECONDS (duration); + } + } + + if (mDecoder->mDuration != -1 && + mDecoder->mDuration > duration) { + /* We decoded more than the reported duration (which could be estimated) */ + LOG(PR_LOG_DEBUG, ("mDuration > duration")); + duration = mDecoder->mDuration; + } + + return duration; +} + +void nsGStreamerReader::NeedDataCb(GstAppSrc *aSrc, + guint aLength, + gpointer aUserData) +{ + nsGStreamerReader *reader = (nsGStreamerReader *) aUserData; + reader->NeedData(aSrc, aLength); +} + +void nsGStreamerReader::NeedData(GstAppSrc *aSrc, guint aLength) +{ + if (aLength == -1) + aLength = DEFAULT_SOURCE_READ_SIZE; + ReadAndPushData(aLength); +} + +void nsGStreamerReader::EnoughDataCb(GstAppSrc *aSrc, gpointer aUserData) +{ + nsGStreamerReader *reader = (nsGStreamerReader *) aUserData; + reader->EnoughData(aSrc); +} + +void nsGStreamerReader::EnoughData(GstAppSrc *aSrc) +{ +} + +gboolean nsGStreamerReader::SeekDataCb(GstAppSrc *aSrc, + guint64 aOffset, + gpointer aUserData) +{ + nsGStreamerReader *reader = (nsGStreamerReader *) aUserData; + return reader->SeekData(aSrc, aOffset); +} + +gboolean nsGStreamerReader::SeekData(GstAppSrc *aSrc, guint64 aOffset) +{ + ReentrantMonitorAutoEnter mon(mGstThreadsMonitor); + MediaResource* resource = mDecoder->GetResource(); + + if (gst_app_src_get_size(mSource) == -1) + /* It's possible that we didn't know the length when we initialized mSource + * but maybe we do now + */ + gst_app_src_set_size(mSource, resource->GetLength()); + + nsresult rv = NS_ERROR_FAILURE; + if (aOffset < resource->GetLength()) + rv = resource->Seek(SEEK_SET, aOffset); + + if (NS_SUCCEEDED(rv)) + mByteOffset = mLastReportedByteOffset = aOffset; + else + LOG(PR_LOG_ERROR, ("seek at %lu failed", aOffset)); + + return NS_SUCCEEDED(rv); +} + +gboolean nsGStreamerReader::EventProbeCb(GstPad *aPad, + GstEvent *aEvent, + gpointer aUserData) +{ + nsGStreamerReader *reader = (nsGStreamerReader *) aUserData; + return reader->EventProbe(aPad, aEvent); +} + +gboolean nsGStreamerReader::EventProbe(GstPad *aPad, GstEvent *aEvent) +{ + GstElement *parent = GST_ELEMENT(gst_pad_get_parent(aPad)); + switch(GST_EVENT_TYPE(aEvent)) { + case GST_EVENT_NEWSEGMENT: + { + gboolean update; + gdouble rate; + GstFormat format; + gint64 start, stop, position; + GstSegment *segment; + + /* Store the segments so we can convert timestamps to stream time, which + * is what the upper layers sync on. 
+ */ + ReentrantMonitorAutoEnter mon(mGstThreadsMonitor); + gst_event_parse_new_segment(aEvent, &update, &rate, &format, + &start, &stop, &position); + if (parent == GST_ELEMENT(mVideoAppSink)) + segment = &mVideoSegment; + else + segment = &mAudioSegment; + gst_segment_set_newsegment(segment, update, rate, format, + start, stop, position); + break; + } + case GST_EVENT_FLUSH_STOP: + /* Reset on seeks */ + ResetDecode(); + break; + default: + break; + } + gst_object_unref(parent); + + return TRUE; +} + +GstFlowReturn nsGStreamerReader::NewPrerollCb(GstAppSink *aSink, + gpointer aUserData) +{ + nsGStreamerReader *reader = (nsGStreamerReader *) aUserData; + + if (aSink == reader->mVideoAppSink) + reader->VideoPreroll(); + else + reader->AudioPreroll(); + return GST_FLOW_OK; +} + +void nsGStreamerReader::AudioPreroll() +{ + /* The first audio buffer has reached the audio sink. Get rate and channels */ + LOG(PR_LOG_DEBUG, ("Audio preroll")); + GstPad *sinkpad = gst_element_get_pad(GST_ELEMENT(mAudioAppSink), "sink"); + GstCaps *caps = gst_pad_get_negotiated_caps(sinkpad); + GstStructure *s = gst_caps_get_structure(caps, 0); + mInfo.mAudioRate = mInfo.mAudioChannels = 0; + gst_structure_get_int(s, "rate", (gint *) &mInfo.mAudioRate); + gst_structure_get_int(s, "channels", (gint *) &mInfo.mAudioChannels); + NS_ASSERTION(mInfo.mAudioRate != 0, ("audio rate is zero")); + NS_ASSERTION(mInfo.mAudioChannels != 0, ("audio channels is zero")); + NS_ASSERTION(mInfo.mAudioChannels > 0 && mInfo.mAudioChannels <= MAX_CHANNELS, + "invalid audio channels number"); + mInfo.mHasAudio = true; + gst_caps_unref(caps); + gst_object_unref(sinkpad); +} + +void nsGStreamerReader::VideoPreroll() +{ + /* The first video buffer has reached the video sink. Get width and height */ + LOG(PR_LOG_DEBUG, ("Video preroll")); + GstPad *sinkpad = gst_element_get_pad(GST_ELEMENT(mVideoAppSink), "sink"); + GstCaps *caps = gst_pad_get_negotiated_caps(sinkpad); + gst_video_format_parse_caps(caps, &mFormat, &mPicture.width, &mPicture.height); + GstStructure *structure = gst_caps_get_structure(caps, 0); + gst_structure_get_fraction(structure, "framerate", &fpsNum, &fpsDen); + NS_ASSERTION(mPicture.width && mPicture.height, "invalid video resolution"); + mInfo.mDisplay = nsIntSize(mPicture.width, mPicture.height); + mInfo.mHasVideo = true; + gst_caps_unref(caps); + gst_object_unref(sinkpad); +} + +GstFlowReturn nsGStreamerReader::NewBufferCb(GstAppSink *aSink, + gpointer aUserData) +{ + nsGStreamerReader *reader = (nsGStreamerReader *) aUserData; + + if (aSink == reader->mVideoAppSink) + reader->NewVideoBuffer(); + else + reader->NewAudioBuffer(); + + return GST_FLOW_OK; +} + +void nsGStreamerReader::NewVideoBuffer() +{ + ReentrantMonitorAutoEnter mon(mGstThreadsMonitor); + /* We have a new video buffer queued in the video sink. Increment the counter + * and notify the decode thread potentially blocked in DecodeVideoFrame + */ + mDecoder->GetFrameStatistics().NotifyDecodedFrames(1, 0); + mVideoSinkBufferCount++; + mon.NotifyAll(); +} + +void nsGStreamerReader::NewAudioBuffer() +{ + ReentrantMonitorAutoEnter mon(mGstThreadsMonitor); + /* We have a new audio buffer queued in the audio sink. 
Increment the counter
+   * and notify the decode thread potentially blocked in DecodeAudioData
+   */
+  mAudioSinkBufferCount++;
+  mon.NotifyAll();
+}
+
+void nsGStreamerReader::EosCb(GstAppSink *aSink, gpointer aUserData)
+{
+  nsGStreamerReader *reader = (nsGStreamerReader *) aUserData;
+  reader->Eos(aSink);
+}
+
+void nsGStreamerReader::Eos(GstAppSink *aSink)
+{
+  /* We reached the end of the stream */
+  {
+    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
+    /* Potentially unblock DecodeVideoFrame and DecodeAudioData */
+    mReachedEos = true;
+    mon.NotifyAll();
+  }
+
+  {
+    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
+    /* Potentially unblock the decode thread in ::DecodeLoop */
+    mVideoQueue.Finish();
+    mAudioQueue.Finish();
+    mon.NotifyAll();
+  }
+}
diff --git a/content/media/gstreamer/nsGStreamerReader.h b/content/media/gstreamer/nsGStreamerReader.h
new file mode 100644
index 00000000000..8341adb537a
--- /dev/null
+++ b/content/media/gstreamer/nsGStreamerReader.h
@@ -0,0 +1,134 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(nsGStreamerReader_h_)
+#define nsGStreamerReader_h_
+
+#include <gst/gst.h>
+#include <gst/app/gstappsrc.h>
+#include <gst/app/gstappsink.h>
+#include <gst/video/video.h>
+#include "nsBuiltinDecoderReader.h"
+
+using namespace mozilla;
+
+class nsMediaDecoder;
+class nsTimeRanges;
+
+class nsGStreamerReader : public nsBuiltinDecoderReader
+{
+public:
+  nsGStreamerReader(nsBuiltinDecoder* aDecoder);
+  virtual ~nsGStreamerReader();
+
+  virtual nsresult Init(nsBuiltinDecoderReader* aCloneDonor);
+  virtual nsresult ResetDecode();
+  virtual bool DecodeAudioData();
+  virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
+                                PRInt64 aTimeThreshold);
+  virtual nsresult ReadMetadata(nsVideoInfo* aInfo);
+  virtual nsresult Seek(PRInt64 aTime,
+                        PRInt64 aStartTime,
+                        PRInt64 aEndTime,
+                        PRInt64 aCurrentTime);
+  virtual nsresult GetBuffered(nsTimeRanges* aBuffered, PRInt64 aStartTime);
+
+  virtual bool HasAudio() {
+    return mInfo.mHasAudio;
+  }
+
+  virtual bool HasVideo() {
+    return mInfo.mHasVideo;
+  }
+
+private:
+
+  void ReadAndPushData(guint aLength);
+  bool WaitForDecodedData(int *counter);
+  void NotifyBytesConsumed();
+  PRInt64 QueryDuration();
+
+  /* Gst callbacks */
+
+  /* Called on the source-setup signal emitted by playbin. Used to
+   * configure appsrc.
+   */
+  static void PlayBinSourceSetupCb(GstElement *aPlayBin,
+                                   GstElement *aSource,
+                                   gpointer aUserData);
+  void PlayBinSourceSetup(GstAppSrc *aSource);
+
+  /* Called from appsrc when we need to read more data from the resource */
+  static void NeedDataCb(GstAppSrc *aSrc, guint aLength, gpointer aUserData);
+  void NeedData(GstAppSrc *aSrc, guint aLength);
+
+  /* Called when appsrc has enough data and we can stop reading */
+  static void EnoughDataCb(GstAppSrc *aSrc, gpointer aUserData);
+  void EnoughData(GstAppSrc *aSrc);
+
+  /* Called when a seek is issued on the pipeline */
+  static gboolean SeekDataCb(GstAppSrc *aSrc,
+                             guint64 aOffset,
+                             gpointer aUserData);
+  gboolean SeekData(GstAppSrc *aSrc, guint64 aOffset);
+
+  /* Called when events reach the sinks. See inline comments */
+  static gboolean EventProbeCb(GstPad *aPad, GstEvent *aEvent, gpointer aUserData);
+  gboolean EventProbe(GstPad *aPad, GstEvent *aEvent);
+
+  /* Called when the pipeline is prerolled, that is when at start or after a
+   * seek, the first audio and video buffers are queued in the sinks.
+ */ + static GstFlowReturn NewPrerollCb(GstAppSink *aSink, gpointer aUserData); + void VideoPreroll(); + void AudioPreroll(); + + /* Called when buffers reach the sinks */ + static GstFlowReturn NewBufferCb(GstAppSink *aSink, gpointer aUserData); + void NewVideoBuffer(); + void NewAudioBuffer(); + + /* Called at end of stream, when decoding has finished */ + static void EosCb(GstAppSink *aSink, gpointer aUserData); + void Eos(GstAppSink *aSink); + + GstElement *mPlayBin; + GstBus *mBus; + GstAppSrc *mSource; + /* video sink bin */ + GstElement *mVideoSink; + /* the actual video app sink */ + GstAppSink *mVideoAppSink; + /* audio sink bin */ + GstElement *mAudioSink; + /* the actual audio app sink */ + GstAppSink *mAudioAppSink; + GstVideoFormat mFormat; + nsIntRect mPicture; + int mVideoSinkBufferCount; + int mAudioSinkBufferCount; + GstAppSrcCallbacks mSrcCallbacks; + GstAppSinkCallbacks mSinkCallbacks; + /* monitor used to synchronize access to shared state between gstreamer + * threads and other gecko threads */ + mozilla::ReentrantMonitor mGstThreadsMonitor; + /* video and audio segments we use to convert absolute timestamps to [0, + * stream_duration]. They're set when the pipeline is started or after a seek. + * Concurrent access guarded with mGstThreadsMonitor. + */ + GstSegment mVideoSegment; + GstSegment mAudioSegment; + /* bool used to signal when gst has detected the end of stream and + * DecodeAudioData and DecodeVideoFrame should not expect any more data + */ + bool mReachedEos; + /* offset we've reached reading from the source */ + gint64 mByteOffset; + /* the last offset we reported with NotifyBytesConsumed */ + gint64 mLastReportedByteOffset; + int fpsNum; + int fpsDen; +}; + +#endif diff --git a/content/media/nsBuiltinDecoderReader.h b/content/media/nsBuiltinDecoderReader.h index 48d961062dd..fe8b498aceb 100644 --- a/content/media/nsBuiltinDecoderReader.h +++ b/content/media/nsBuiltinDecoderReader.h @@ -389,7 +389,7 @@ public: typedef mozilla::VideoFrameContainer VideoFrameContainer; nsBuiltinDecoderReader(nsBuiltinDecoder* aDecoder); - ~nsBuiltinDecoderReader(); + virtual ~nsBuiltinDecoderReader(); // Initializes the reader, returns NS_OK on success, or NS_ERROR_FAILURE // on failure. 
diff --git a/content/media/nsBuiltinDecoderStateMachine.cpp b/content/media/nsBuiltinDecoderStateMachine.cpp index a385c7711fb..64a0a80c790 100644 --- a/content/media/nsBuiltinDecoderStateMachine.cpp +++ b/content/media/nsBuiltinDecoderStateMachine.cpp @@ -458,6 +458,7 @@ nsBuiltinDecoderStateMachine::~nsBuiltinDecoderStateMachine() if (mTimer) mTimer->Cancel(); mTimer = nsnull; + mReader = nsnull; StateMachineTracker::Instance().CleanupGlobalStateMachine(); } diff --git a/js/src/config/system-headers b/js/src/config/system-headers index c0f022162ef..1486194c042 100644 --- a/js/src/config/system-headers +++ b/js/src/config/system-headers @@ -1055,3 +1055,7 @@ ogg/os_types.h nestegg/nestegg.h cubeb/cubeb.h #endif +gst/gst.h +gst/app/gstappsink.h +gst/app/gstappsrc.h +gst/video/video.h diff --git a/layout/build/Makefile.in b/layout/build/Makefile.in index badb2e0050f..1e31a44109e 100644 --- a/layout/build/Makefile.in +++ b/layout/build/Makefile.in @@ -183,6 +183,12 @@ SHARED_LIBRARY_LIBS += \ $(NULL) endif +ifdef MOZ_GSTREAMER +SHARED_LIBRARY_LIBS += \ + $(DEPTH)/content/media/gstreamer/$(LIB_PREFIX)gkcongstreamer_s.$(LIB_SUFFIX) \ + $(NULL) +endif + ifdef NS_PRINTING SHARED_LIBRARY_LIBS += \ ../printing/$(LIB_PREFIX)gkprinting_s.$(LIB_SUFFIX) \ @@ -223,6 +229,10 @@ endif SHARED_LIBRARY_LIBS += \ $(DEPTH)/js/xpconnect/src/$(LIB_PREFIX)xpconnect_s.$(LIB_SUFFIX) +ifdef MOZ_GSTREAMER +EXTRA_DSO_LDOPTS += $(GSTREAMER_LIBS) +endif + include $(topsrcdir)/config/config.mk include $(topsrcdir)/ipc/chromium/chromium-config.mk diff --git a/modules/libpref/src/init/all.js b/modules/libpref/src/init/all.js index d7ec0062e3d..3abc0a37bbd 100644 --- a/modules/libpref/src/init/all.js +++ b/modules/libpref/src/init/all.js @@ -193,6 +193,10 @@ pref("media.wave.enabled", true); #ifdef MOZ_WEBM pref("media.webm.enabled", true); #endif +#ifdef MOZ_GSTREAMER +pref("media.h264.enabled", true); +#endif + // Whether to autostart a media element with an |autoplay| attribute pref("media.autoplay.enabled", true); diff --git a/toolkit/library/Makefile.in b/toolkit/library/Makefile.in index 20df295aece..25ab5e43daa 100644 --- a/toolkit/library/Makefile.in +++ b/toolkit/library/Makefile.in @@ -555,6 +555,10 @@ ifdef MOZ_ENABLE_QT EXTRA_DSO_LDOPTS += $(MOZ_QT_LDFLAGS) $(XEXT_LIBS) endif +ifdef MOZ_GSTREAMER +EXTRA_DSO_LDOPTS += $(GSTREAMER_LIBS) +endif + include $(topsrcdir)/config/rules.mk export:: $(RDF_UTIL_SRC_CPPSRCS) $(INTL_UNICHARUTIL_UTIL_CPPSRCS)