/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifdef XP_WIN
// Include Windows headers required for enabling high precision timers.
#include "windows.h"
#include "mmsystem.h"
#endif

#include <algorithm>
#include <stdint.h>

#include "gfx2DGlue.h"
#include "mediasink/AudioSink.h"
#include "mediasink/AudioSinkWrapper.h"
#include "mediasink/DecodedStream.h"
#include "mediasink/OutputStreamManager.h"
#include "mediasink/VideoSink.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/IndexSequence.h"
#include "mozilla/Logging.h"
#include "mozilla/mozalloc.h"
#include "mozilla/MathAlgorithms.h"
#include "mozilla/Preferences.h"
#include "mozilla/SharedThreadPool.h"
#include "mozilla/Sprintf.h"
#include "mozilla/TaskQueue.h"
#include "mozilla/Tuple.h"

#include "nsComponentManagerUtils.h"
#include "nsContentUtils.h"
#include "nsIEventTarget.h"
#include "nsIMemoryReporter.h"
#include "nsITimer.h"
#include "nsPrintfCString.h"
#include "nsTArray.h"
#include "nsDeque.h"
#include "prenv.h"

#include "AudioSegment.h"
#include "DOMMediaStream.h"
#include "ImageContainer.h"
#include "MediaDecoder.h"
#include "MediaDecoderStateMachine.h"
#include "MediaShutdownManager.h"
#include "MediaPrefs.h"
#include "MediaTimer.h"
#include "ReaderProxy.h"
#include "TimeUnits.h"
#include "VideoSegment.h"
#include "VideoUtils.h"
#include "gfxPrefs.h"

namespace mozilla {

using namespace mozilla::dom;
using namespace mozilla::layers;
using namespace mozilla::media;

#define NS_DispatchToMainThread(...) CompileError_UseAbstractThreadDispatchInstead

// avoid redefined macro in unified build
#undef FMT
#undef LOG
#undef LOGV
#undef LOGW
#undef SFMT
#undef SLOG
#undef SLOGW

#define FMT(x, ...) "Decoder=%p " x, mDecoderID, ##__VA_ARGS__
#define LOG(x, ...) MOZ_LOG(gMediaDecoderLog, LogLevel::Debug, (FMT(x, ##__VA_ARGS__)))
#define LOGV(x, ...) MOZ_LOG(gMediaDecoderLog, LogLevel::Verbose, (FMT(x, ##__VA_ARGS__)))
#define LOGW(x, ...) NS_WARNING(nsPrintfCString(FMT(x, ##__VA_ARGS__)).get())

// Used by StateObject and its sub-classes
#define SFMT(x, ...) "Decoder=%p state=%s " x, mMaster->mDecoderID, ToStateStr(GetState()), ##__VA_ARGS__
#define SLOG(x, ...) MOZ_LOG(gMediaDecoderLog, LogLevel::Debug, (SFMT(x, ##__VA_ARGS__)))
#define SLOGW(x, ...) NS_WARNING(nsPrintfCString(SFMT(x, ##__VA_ARGS__)).get())
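
// For illustration: LOG/LOGV/LOGW expand FMT with the decoder pointer, so a
// call such as LOG("Changed state to %s", ToStateStr(aState)) produces a log
// line prefixed with "Decoder=<ptr> ". SLOG/SLOGW additionally prepend the
// current state via SFMT and are intended for use inside StateObject methods
// where mMaster is in scope.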

// Certain constants get stored as member variables and then adjusted by various
// scale factors on a per-decoder basis. We want to make sure to avoid using these
// constants directly, so we put them in a namespace.
namespace detail {

// Resume a suspended video decoder to the current playback position plus this
// time premium for compensating the seeking delay.
static constexpr auto RESUME_VIDEO_PREMIUM = TimeUnit::FromMicroseconds(125000);

static const int64_t AMPLE_AUDIO_USECS = 2000000;

// If more than this much decoded audio is queued, we'll hold off
// decoding more audio.
static constexpr auto AMPLE_AUDIO_THRESHOLD = TimeUnit::FromMicroseconds(AMPLE_AUDIO_USECS);

} // namespace detail

// If we have fewer than LOW_VIDEO_FRAMES decoded frames, and
// we're not "prerolling video", we'll skip the video up to the next keyframe
// which is at or after the current playback position.
static const uint32_t LOW_VIDEO_FRAMES = 2;

// Threshold used to check if we are low on decoded video.
// If the last video frame's end time |mDecodedVideoEndTime| is more than
// |LOW_VIDEO_THRESHOLD*mPlaybackRate| after the current clock in
// AdvanceFrame(), the video decode is lagging, and we skip to the next keyframe.
static constexpr auto LOW_VIDEO_THRESHOLD = TimeUnit::FromMicroseconds(60000);

// Arbitrary "frame duration" when playing only audio.
static const int AUDIO_DURATION_USECS = 40000;

namespace detail {

// If we have less than this much buffered data available, we'll consider
// ourselves to be running low on buffered data. We determine how much
// buffered data we have remaining using the reader's GetBuffered()
// implementation.
static const int64_t LOW_BUFFER_THRESHOLD_USECS = 5000000;

static constexpr auto LOW_BUFFER_THRESHOLD = TimeUnit::FromMicroseconds(LOW_BUFFER_THRESHOLD_USECS);

// LOW_BUFFER_THRESHOLD_USECS needs to be greater than AMPLE_AUDIO_USECS, otherwise
// the skip-to-keyframe logic can activate when we're running low on data.
static_assert(LOW_BUFFER_THRESHOLD_USECS > AMPLE_AUDIO_USECS,
              "LOW_BUFFER_THRESHOLD_USECS is too small");

} // namespace detail

// Amount of excess data to add in to the "should we buffer" calculation.
static constexpr auto EXHAUSTED_DATA_MARGIN = TimeUnit::FromMicroseconds(100000);

static const uint32_t MIN_VIDEO_QUEUE_SIZE = 3;
static const uint32_t MAX_VIDEO_QUEUE_SIZE = 10;
#ifdef MOZ_APPLEMEDIA
static const uint32_t HW_VIDEO_QUEUE_SIZE = 10;
#else
static const uint32_t HW_VIDEO_QUEUE_SIZE = 3;
#endif
static const uint32_t VIDEO_QUEUE_SEND_TO_COMPOSITOR_SIZE = 9999;

static uint32_t sVideoQueueDefaultSize = MAX_VIDEO_QUEUE_SIZE;
static uint32_t sVideoQueueHWAccelSize = HW_VIDEO_QUEUE_SIZE;
static uint32_t sVideoQueueSendToCompositorSize =
  VIDEO_QUEUE_SEND_TO_COMPOSITOR_SIZE;

static void InitVideoQueuePrefs()
{
  MOZ_ASSERT(NS_IsMainThread());
  static bool sPrefInit = false;
  if (!sPrefInit) {
    sPrefInit = true;
    sVideoQueueDefaultSize = Preferences::GetUint(
      "media.video-queue.default-size", MAX_VIDEO_QUEUE_SIZE);
    sVideoQueueHWAccelSize = Preferences::GetUint(
      "media.video-queue.hw-accel-size", HW_VIDEO_QUEUE_SIZE);
    sVideoQueueSendToCompositorSize = Preferences::GetUint(
      "media.video-queue.send-to-compositor-size", VIDEO_QUEUE_SEND_TO_COMPOSITOR_SIZE);
  }
}

// Delay, in milliseconds, that a tab needs to be in the background before
// video decoding is suspended.
static TimeDuration
SuspendBackgroundVideoDelay()
{
  return TimeDuration::FromMilliseconds(
    MediaPrefs::MDSMSuspendBackgroundVideoDelay());
}

class MediaDecoderStateMachine::StateObject
{
public:
  virtual ~StateObject() { }
  virtual void Exit() { }   // Exit action.
  virtual void Step() { }   // Perform a 'cycle' of this state object.
  virtual State GetState() const = 0;

  // Event handlers for various events.
  virtual void HandleAudioCaptured() { }
  virtual void HandleAudioDecoded(AudioData* aAudio)
  {
    Crash("Unexpected event!", __func__);
  }
  virtual void HandleVideoDecoded(VideoData* aVideo, TimeStamp aDecodeStart)
  {
    Crash("Unexpected event!", __func__);
  }
  virtual void HandleAudioWaited(MediaData::Type aType)
  {
    Crash("Unexpected event!", __func__);
  }
  virtual void HandleVideoWaited(MediaData::Type aType)
  {
    Crash("Unexpected event!", __func__);
  }
  virtual void HandleWaitingForAudio()
  {
    Crash("Unexpected event!", __func__);
  }
  virtual void HandleAudioCanceled()
  {
    Crash("Unexpected event!", __func__);
  }
  virtual void HandleEndOfAudio()
  {
    Crash("Unexpected event!", __func__);
  }
  virtual void HandleWaitingForVideo()
  {
    Crash("Unexpected event!", __func__);
  }
  virtual void HandleVideoCanceled()
  {
    Crash("Unexpected event!", __func__);
  }
  virtual void HandleEndOfVideo()
  {
    Crash("Unexpected event!", __func__);
  }

  virtual RefPtr<MediaDecoder::SeekPromise> HandleSeek(SeekTarget aTarget);

  virtual RefPtr<ShutdownPromise> HandleShutdown();

  virtual void HandleVideoSuspendTimeout() = 0;

  virtual void HandleResumeVideoDecoding(const TimeUnit& aTarget);

  virtual void HandlePlayStateChanged(MediaDecoder::PlayState aPlayState) { }

  virtual nsCString GetDebugInfo() { return nsCString(); }

private:
  template <class S, typename R, typename... As>
  auto ReturnTypeHelper(R(S::*)(As...)) -> R;

  void Crash(const char* aReason, const char* aSite)
  {
    char buf[1024];
    SprintfLiteral(buf, "%s state=%s callsite=%s", aReason,
                   ToStateStr(GetState()), aSite);
    MOZ_ReportAssertionFailure(buf, __FILE__, __LINE__);
    MOZ_CRASH();
  }

protected:
  enum class EventVisibility : int8_t
  {
    Observable,
    Suppressed
  };

  using Master = MediaDecoderStateMachine;
  explicit StateObject(Master* aPtr) : mMaster(aPtr) { }
  TaskQueue* OwnerThread() const { return mMaster->mTaskQueue; }
  ReaderProxy* Reader() const { return mMaster->mReader; }
  const MediaInfo& Info() const { return mMaster->Info(); }
  MediaQueue<AudioData>& AudioQueue() const { return mMaster->mAudioQueue; }
  MediaQueue<VideoData>& VideoQueue() const { return mMaster->mVideoQueue; }

  template <class S, typename... Args, size_t... Indexes>
  auto
  CallEnterMemberFunction(S* aS,
                          Tuple<Args...>& aTuple,
                          IndexSequence<Indexes...>)
    -> decltype(ReturnTypeHelper(&S::Enter))
  {
    return aS->Enter(Move(Get<Indexes>(aTuple))...);
  }

  // Note this function will delete the current state object.
  // Don't access members to avoid UAF after this call.
  template <class S, typename... Ts>
  auto SetState(Ts&&... aArgs)
    -> decltype(ReturnTypeHelper(&S::Enter))
  {
    // |aArgs| must be passed by reference to avoid passing MOZ_NON_PARAM class
    // SeekJob by value. See bug 1287006 and bug 1338374. But we still *must*
    // copy the parameters, because |Exit()| can modify them. See bug 1312321.
    // So we 1) pass the parameters by reference, but then 2) immediately copy
    // them into a Tuple to be safe against modification, and finally 3) move
    // the elements of the Tuple into the final function call.
    auto copiedArgs = MakeTuple(Forward<Ts>(aArgs)...);

    // keep mMaster in a local object because mMaster will become invalid after
    // the current state object is deleted.
    auto master = mMaster;

    auto* s = new S(master);

    MOZ_ASSERT(GetState() != s->GetState() ||
               GetState() == DECODER_STATE_SEEKING);

    SLOG("change state to: %s", ToStateStr(s->GetState()));

    Exit();

    master->mStateObj.reset(s);
    return CallEnterMemberFunction(s, copiedArgs,
                                   typename IndexSequenceFor<Ts...>::Type());
  }
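
  // For illustration: a state transition is performed with e.g.
  //   SetState<DormantState>();
  // or, when the target state's Enter() takes arguments, with something like
  //   SetState<AccurateSeekingState>(Move(seekJob), EventVisibility::Observable);
  // (hypothetical call site; the arguments are forwarded to S::Enter()).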

  RefPtr<MediaDecoder::SeekPromise>
  SetSeekingState(SeekJob&& aSeekJob, EventVisibility aVisibility);

  // Take a raw pointer in order not to change the life cycle of MDSM.
  // It is guaranteed to be valid by MDSM.
  Master* mMaster;
};

/**
 * Purpose: decode metadata like duration and dimensions of the media resource.
 *
 * Transition to other states when decoding metadata is done:
 *   SHUTDOWN if failing to decode metadata.
 *   DECODING_FIRSTFRAME otherwise.
 */
class MediaDecoderStateMachine::DecodeMetadataState
  : public MediaDecoderStateMachine::StateObject
{
public:
  explicit DecodeMetadataState(Master* aPtr) : StateObject(aPtr) { }

  void Enter()
  {
    MOZ_ASSERT(!mMaster->mVideoDecodeSuspended);
    MOZ_ASSERT(!mMetadataRequest.Exists());
    SLOG("Dispatching AsyncReadMetadata");

    // We disconnect mMetadataRequest in Exit() so it is fine to capture
    // a raw pointer here.
    Reader()->ReadMetadata()
      ->Then(OwnerThread(), __func__,
             [this] (MetadataHolder&& aMetadata) {
               OnMetadataRead(Move(aMetadata));
             },
             [this] (const MediaResult& aError) {
               OnMetadataNotRead(aError);
             })
      ->Track(mMetadataRequest);
  }

  void Exit() override { mMetadataRequest.DisconnectIfExists(); }

  State GetState() const override { return DECODER_STATE_DECODING_METADATA; }

  RefPtr<MediaDecoder::SeekPromise> HandleSeek(SeekTarget aTarget) override
  {
    MOZ_DIAGNOSTIC_ASSERT(false, "Can't seek while decoding metadata.");
    return MediaDecoder::SeekPromise::CreateAndReject(true, __func__);
  }

  void HandleVideoSuspendTimeout() override
  {
    // Do nothing since no decoders are created yet.
  }

  void HandleResumeVideoDecoding(const TimeUnit&) override
  {
    // We never suspend video decoding in this state.
    MOZ_ASSERT(false, "Shouldn't have suspended video decoding.");
  }

private:
  void OnMetadataRead(MetadataHolder&& aMetadata);

  void OnMetadataNotRead(const MediaResult& aError)
  {
    mMetadataRequest.Complete();
    SLOGW("Decode metadata failed, shutting down decoder");
    mMaster->DecodeError(aError);
  }

  MozPromiseRequestHolder<MediaFormatReader::MetadataPromise> mMetadataRequest;
};

/**
 * Purpose: release decoder resources to save memory and hardware resources.
 *
 * Transition to:
 *   SEEKING if any seek request or play state changes to PLAYING.
 */
class MediaDecoderStateMachine::DormantState
  : public MediaDecoderStateMachine::StateObject
{
public:
  explicit DormantState(Master* aPtr) : StateObject(aPtr) { }

  void Enter()
  {
    if (mMaster->IsPlaying()) {
      mMaster->StopPlayback();
    }

    // Calculate the position to seek to when exiting dormant.
    auto t = mMaster->mMediaSink->IsStarted()
      ? mMaster->GetClock() : mMaster->GetMediaTime();
    mPendingSeek.mTarget.emplace(t, SeekTarget::Accurate);
    // SeekJob asserts |mTarget.IsValid() == !mPromise.IsEmpty()| so we
    // need to create the promise even if it is not used at all.
    // The promise may be used when coming out of DormantState into
    // SeekingState.
    RefPtr<MediaDecoder::SeekPromise> x =
      mPendingSeek.mPromise.Ensure(__func__);

    // No need to call ResetDecode() and StopMediaSink() here.
    // We will do them during seeking when exiting dormant.

    // Ignore WAIT_FOR_DATA since we won't decode in dormant.
    mMaster->mAudioWaitRequest.DisconnectIfExists();
    mMaster->mVideoWaitRequest.DisconnectIfExists();

    MaybeReleaseResources();
  }

  void Exit() override
  {
    // mPendingSeek is either moved when exiting dormant or
    // should be rejected here before transition to SHUTDOWN.
    mPendingSeek.RejectIfExists(__func__);
  }

  State GetState() const override { return DECODER_STATE_DORMANT; }

  RefPtr<MediaDecoder::SeekPromise> HandleSeek(SeekTarget aTarget) override;

  void HandleVideoSuspendTimeout() override
  {
    // Do nothing since we've released decoders in Enter().
  }

  void HandleResumeVideoDecoding(const TimeUnit&) override
  {
    // Do nothing since we won't resume decoding until exiting dormant.
  }

  void HandlePlayStateChanged(MediaDecoder::PlayState aPlayState) override;

  void HandleAudioDecoded(AudioData*) override { MaybeReleaseResources(); }
  void HandleVideoDecoded(VideoData*, TimeStamp) override
  {
    MaybeReleaseResources();
  }
  void HandleWaitingForAudio() override { MaybeReleaseResources(); }
  void HandleWaitingForVideo() override { MaybeReleaseResources(); }
  void HandleAudioCanceled() override { MaybeReleaseResources(); }
  void HandleVideoCanceled() override { MaybeReleaseResources(); }
  void HandleEndOfAudio() override { MaybeReleaseResources(); }
  void HandleEndOfVideo() override { MaybeReleaseResources(); }

private:
  void MaybeReleaseResources()
  {
    if (!mMaster->mAudioDataRequest.Exists() &&
        !mMaster->mVideoDataRequest.Exists()) {
      // Release decoders only when they are idle. Otherwise it might cause
      // decode error later when resetting decoders during seeking.
      mMaster->mReader->ReleaseResources();
    }
  }

  SeekJob mPendingSeek;
};

/**
 * Purpose: decode the 1st audio and video frames to fire the 'loadeddata' event.
 *
 * Transition to:
 *   SHUTDOWN if any decode error.
 *   SEEKING if any seek request.
 *   DECODING when the 'loadeddata' event is fired.
 */
class MediaDecoderStateMachine::DecodingFirstFrameState
  : public MediaDecoderStateMachine::StateObject
{
public:
  explicit DecodingFirstFrameState(Master* aPtr) : StateObject(aPtr) { }

  void Enter();

  void Exit() override
  {
    // mPendingSeek is either moved in MaybeFinishDecodeFirstFrame()
    // or should be rejected here before transition to SHUTDOWN.
    mPendingSeek.RejectIfExists(__func__);
  }

  State GetState() const override { return DECODER_STATE_DECODING_FIRSTFRAME; }

  void HandleAudioDecoded(AudioData* aAudio) override
  {
    mMaster->PushAudio(aAudio);
    MaybeFinishDecodeFirstFrame();
  }

  void HandleVideoDecoded(VideoData* aVideo, TimeStamp aDecodeStart) override
  {
    mMaster->PushVideo(aVideo);
    MaybeFinishDecodeFirstFrame();
  }

  void HandleWaitingForAudio() override
  {
    mMaster->WaitForData(MediaData::AUDIO_DATA);
  }

  void HandleAudioCanceled() override
  {
    mMaster->RequestAudioData();
  }

  void HandleEndOfAudio() override
  {
    AudioQueue().Finish();
    MaybeFinishDecodeFirstFrame();
  }

  void HandleWaitingForVideo() override
  {
    mMaster->WaitForData(MediaData::VIDEO_DATA);
  }

  void HandleVideoCanceled() override
  {
    mMaster->RequestVideoData(media::TimeUnit());
  }

  void HandleEndOfVideo() override
  {
    VideoQueue().Finish();
    MaybeFinishDecodeFirstFrame();
  }

  void HandleAudioWaited(MediaData::Type aType) override
  {
    mMaster->RequestAudioData();
  }

  void HandleVideoWaited(MediaData::Type aType) override
  {
    mMaster->RequestVideoData(media::TimeUnit());
  }

  void HandleVideoSuspendTimeout() override
  {
    // Do nothing for we need to decode the 1st video frame to get the
    // dimensions.
  }

  void HandleResumeVideoDecoding(const TimeUnit&) override
  {
    // We never suspend video decoding in this state.
    MOZ_ASSERT(false, "Shouldn't have suspended video decoding.");
  }

  RefPtr<MediaDecoder::SeekPromise> HandleSeek(SeekTarget aTarget) override
  {
    if (mMaster->mIsMSE) {
      return StateObject::HandleSeek(aTarget);
    }
    // Delay seek request until decoding first frames for non-MSE media.
    SLOG("Not Enough Data to seek at this stage, queuing seek");
    mPendingSeek.RejectIfExists(__func__);
    mPendingSeek.mTarget.emplace(aTarget);
    return mPendingSeek.mPromise.Ensure(__func__);
  }

private:
  // Notify FirstFrameLoaded if having decoded first frames and
  // transition to SEEKING if there is any pending seek, or DECODING otherwise.
  void MaybeFinishDecodeFirstFrame();

  SeekJob mPendingSeek;
};

/**
 * Purpose: decode audio/video data for playback.
 *
 * Transition to:
 *   DORMANT if playback is paused for a while.
 *   SEEKING if any seek request.
 *   SHUTDOWN if any decode error.
 *   BUFFERING if playback can't continue due to lack of decoded data.
 *   COMPLETED when having decoded all audio/video data.
 */
class MediaDecoderStateMachine::DecodingState
  : public MediaDecoderStateMachine::StateObject
{
public:
  explicit DecodingState(Master* aPtr)
    : StateObject(aPtr)
    , mDormantTimer(OwnerThread())
  {
  }

  void Enter();

  void Exit() override
  {
    if (!mDecodeStartTime.IsNull()) {
      TimeDuration decodeDuration = TimeStamp::Now() - mDecodeStartTime;
      SLOG("Exiting DECODING, decoded for %.3lfs", decodeDuration.ToSeconds());
    }
    mDormantTimer.Reset();
    mOnAudioPopped.DisconnectIfExists();
    mOnVideoPopped.DisconnectIfExists();
  }

  void Step() override
  {
    if (mMaster->mPlayState != MediaDecoder::PLAY_STATE_PLAYING &&
        mMaster->IsPlaying()) {
      // We're playing, but the element/decoder is in paused state. Stop
      // playing!
      mMaster->StopPlayback();
    }

    // Start playback if necessary so that the clock can be properly queried.
    if (!mIsPrerolling) {
      mMaster->MaybeStartPlayback();
    }

    mMaster->UpdatePlaybackPositionPeriodically();

    MOZ_ASSERT(!mMaster->IsPlaying() || mMaster->IsStateMachineScheduled(),
               "Must have timer scheduled");

    MaybeStartBuffering();
  }

  State GetState() const override
  {
    return DECODER_STATE_DECODING;
  }

  void HandleAudioDecoded(AudioData* aAudio) override
  {
    mMaster->PushAudio(aAudio);
    DispatchDecodeTasksIfNeeded();
    MaybeStopPrerolling();
  }

  void HandleVideoDecoded(VideoData* aVideo, TimeStamp aDecodeStart) override
  {
    mMaster->PushVideo(aVideo);
    DispatchDecodeTasksIfNeeded();
    MaybeStopPrerolling();
  }

  void HandleAudioCanceled() override
  {
    mMaster->RequestAudioData();
  }

  void HandleVideoCanceled() override
  {
    mMaster->RequestVideoData(mMaster->GetMediaTime());
  }

  void HandleEndOfAudio() override;
  void HandleEndOfVideo() override;

  void HandleWaitingForAudio() override
  {
    mMaster->WaitForData(MediaData::AUDIO_DATA);
    MaybeStopPrerolling();
  }

  void HandleWaitingForVideo() override
  {
    mMaster->WaitForData(MediaData::VIDEO_DATA);
    MaybeStopPrerolling();
  }

  void HandleAudioWaited(MediaData::Type aType) override
  {
    mMaster->RequestAudioData();
  }

  void HandleVideoWaited(MediaData::Type aType) override
  {
    mMaster->RequestVideoData(mMaster->GetMediaTime());
  }

  void HandleAudioCaptured() override
  {
    MaybeStopPrerolling();
    // MediaSink is changed. Schedule Step() to check if we can start playback.
    mMaster->ScheduleStateMachine();
  }

  void HandleVideoSuspendTimeout() override
  {
    // No video, so nothing to suspend.
    if (!mMaster->HasVideo()) {
      return;
    }

    mMaster->mVideoDecodeSuspended = true;
    mMaster->mOnPlaybackEvent.Notify(MediaEventType::EnterVideoSuspend);
    Reader()->SetVideoBlankDecode(true);
  }

  void HandlePlayStateChanged(MediaDecoder::PlayState aPlayState) override
  {
    if (aPlayState == MediaDecoder::PLAY_STATE_PLAYING) {
      // Schedule Step() to check if we can start playback.
      mMaster->ScheduleStateMachine();
      // Try to dispatch decoding tasks because mMinimizePreroll might have
      // been reset.
      DispatchDecodeTasksIfNeeded();
    }

    if (aPlayState == MediaDecoder::PLAY_STATE_PAUSED) {
      StartDormantTimer();
    } else {
      mDormantTimer.Reset();
    }
  }

  nsCString GetDebugInfo() override
  {
    return nsPrintfCString("mIsPrerolling=%d", mIsPrerolling);
  }

private:
  void DispatchDecodeTasksIfNeeded();
  void EnsureAudioDecodeTaskQueued();
  void EnsureVideoDecodeTaskQueued();
  void MaybeStartBuffering();

  // At the start of decoding we want to "preroll" the decode until we've
  // got a few frames decoded before we consider whether decode is falling
  // behind. Otherwise our "we're falling behind" logic will trigger
  // unnecessarily if we start playing as soon as the first sample is
  // decoded. These two fields store how many video frames and audio
  // samples we must consume before we are considered to be finished prerolling.
  TimeUnit AudioPrerollThreshold() const
  {
    return mMaster->mAmpleAudioThreshold / 2;
  }

  uint32_t VideoPrerollFrames() const
  {
    return mMaster->GetAmpleVideoFrames() / 2;
  }

  bool DonePrerollingAudio()
  {
    return !mMaster->IsAudioDecoding() ||
           mMaster->GetDecodedAudioDuration()
           >= AudioPrerollThreshold().MultDouble(mMaster->mPlaybackRate);
  }

  bool DonePrerollingVideo()
  {
    return !mMaster->IsVideoDecoding() ||
           static_cast<uint32_t>(mMaster->VideoQueue().GetSize()) >=
           VideoPrerollFrames() * mMaster->mPlaybackRate + 1;
  }

  void MaybeStopPrerolling()
  {
    if (mIsPrerolling &&
        (DonePrerollingAudio() || mMaster->IsWaitingAudioData()) &&
        (DonePrerollingVideo() || mMaster->IsWaitingVideoData())) {
      mIsPrerolling = false;
      // Check if we can start playback.
      mMaster->ScheduleStateMachine();
    }
  }

  void StartDormantTimer()
  {
    if (!mMaster->mMediaSeekable) {
      // Don't enter dormant if the media is not seekable because we need to
      // seek when exiting dormant.
      return;
    }

    auto timeout = MediaPrefs::DormantOnPauseTimeout();
    if (timeout < 0) {
      // Disabled when timeout is negative.
      return;
    } else if (timeout == 0) {
      // Enter dormant immediately without scheduling a timer.
      SetState<DormantState>();
      return;
    }

    if (mMaster->mMinimizePreroll) {
      SetState<DormantState>();
      return;
    }

    TimeStamp target = TimeStamp::Now() +
                       TimeDuration::FromMilliseconds(timeout);

    mDormantTimer.Ensure(target,
                         [this] () {
                           mDormantTimer.CompleteRequest();
                           SetState<DormantState>();
                         }, [this] () {
                           mDormantTimer.CompleteRequest();
                         });
  }

  // Time at which we started decoding.
  TimeStamp mDecodeStartTime;

  // When we start decoding (either for the first time, or after a pause)
  // we may be low on decoded data. We don't want our "low data" logic to
  // kick in and decide that we're low on decoded data because the download
  // can't keep up with the decode, and cause us to pause playback. So we
  // have a "preroll" stage, where we ignore the results of our "low data"
  // logic during the first few frames of our decode. This occurs during
  // playback.
  bool mIsPrerolling = true;

  // Fired when playback is paused for a while to enter dormant.
  DelayedScheduler mDormantTimer;

  MediaEventListener mOnAudioPopped;
  MediaEventListener mOnVideoPopped;
};

/**
 * Purpose: seek to a particular new playback position.
 *
 * Transition to:
 *   SEEKING if any new seek request.
 *   SHUTDOWN if seek failed.
 *   COMPLETED if the new playback position is the end of the media resource.
 *   NextFrameSeekingState if completing a NextFrameSeekingFromDormantState.
 *   DECODING otherwise.
 */
class MediaDecoderStateMachine::SeekingState
  : public MediaDecoderStateMachine::StateObject
{
public:
  explicit SeekingState(Master* aPtr) : StateObject(aPtr) { }

  RefPtr<MediaDecoder::SeekPromise> Enter(SeekJob&& aSeekJob,
                                          EventVisibility aVisibility)
  {
    mSeekJob = Move(aSeekJob);
    mVisibility = aVisibility;

    // Always switch off the blank decoder otherwise we might become visible
    // in the middle of seeking and won't have a valid video frame to show
    // when seek is done.
    if (mMaster->mVideoDecodeSuspended) {
      mMaster->mVideoDecodeSuspended = false;
      mMaster->mOnPlaybackEvent.Notify(MediaEventType::ExitVideoSuspend);
      Reader()->SetVideoBlankDecode(false);
    }

    // Suppressed visibility comes from two cases: (1) leaving dormant state,
    // and (2) resuming suspended video decoder. We want both cases to be
    // transparent to the user. So we only notify the change when the seek
    // request is from the user.
    if (mVisibility == EventVisibility::Observable) {
      // Don't stop playback for a video-only seek since we want to keep
      // playing audio, and we don't need to stop playback while leaving
      // dormant because playback should already have been stopped.
      mMaster->StopPlayback();
      mMaster->UpdatePlaybackPositionInternal(mSeekJob.mTarget->GetTime());
      mMaster->mOnPlaybackEvent.Notify(MediaEventType::SeekStarted);
      mMaster->UpdateNextFrameStatus(
        MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_SEEKING);
    }

    RefPtr<MediaDecoder::SeekPromise> p = mSeekJob.mPromise.Ensure(__func__);

    DoSeek();

    return p;
  }

  virtual void Exit() override = 0;

  State GetState() const override
  {
    return DECODER_STATE_SEEKING;
  }

  void HandleAudioDecoded(AudioData* aAudio) override = 0;
  void HandleVideoDecoded(VideoData* aVideo,
                          TimeStamp aDecodeStart) override = 0;
  void HandleAudioWaited(MediaData::Type aType) override = 0;
  void HandleVideoWaited(MediaData::Type aType) override = 0;

  void HandleVideoSuspendTimeout() override
  {
    // Do nothing since we want a valid video frame to show when seek is done.
  }

  void HandleResumeVideoDecoding(const TimeUnit&) override
  {
    // We set mVideoDecodeSuspended to false in Enter().
    MOZ_ASSERT(false, "Shouldn't have suspended video decoding.");
  }

protected:
  SeekJob mSeekJob;
  EventVisibility mVisibility;

  virtual void DoSeek() = 0;
  // Transition to the next state (defined by the subclass) when seek is completed.
  virtual void GoToNextState() { SetState<DecodingState>(); }
  void SeekCompleted();
  virtual TimeUnit CalculateNewCurrentTime() const = 0;
};

class MediaDecoderStateMachine::AccurateSeekingState
  : public MediaDecoderStateMachine::SeekingState
{
public:
  explicit AccurateSeekingState(Master* aPtr) : SeekingState(aPtr) { }

  RefPtr<MediaDecoder::SeekPromise> Enter(SeekJob&& aSeekJob,
                                          EventVisibility aVisibility)
  {
    MOZ_ASSERT(aSeekJob.mTarget->IsAccurate() || aSeekJob.mTarget->IsFast());
    mCurrentTimeBeforeSeek = mMaster->GetMediaTime();
    return SeekingState::Enter(Move(aSeekJob), aVisibility);
  }

  void Exit() override
  {
    // Disconnect MediaDecoder.
    mSeekJob.RejectIfExists(__func__);

    // Disconnect ReaderProxy.
    mSeekRequest.DisconnectIfExists();

    mWaitRequest.DisconnectIfExists();
  }

  void HandleAudioDecoded(AudioData* aAudio) override
  {
    MOZ_ASSERT(!mDoneAudioSeeking || !mDoneVideoSeeking,
               "Seek shouldn't be finished");
    MOZ_ASSERT(aAudio);

    AdjustFastSeekIfNeeded(aAudio);

    if (mSeekJob.mTarget->IsFast()) {
      // Non-precise seek; we can stop the seek at the first sample.
      mMaster->PushAudio(aAudio);
      mDoneAudioSeeking = true;
    } else {
      nsresult rv = DropAudioUpToSeekTarget(aAudio);
      if (NS_FAILED(rv)) {
        mMaster->DecodeError(rv);
        return;
      }
    }

    if (!mDoneAudioSeeking) {
      RequestAudioData();
      return;
    }
    MaybeFinishSeek();
  }

  void HandleVideoDecoded(VideoData* aVideo, TimeStamp aDecodeStart) override
  {
    MOZ_ASSERT(!mDoneAudioSeeking || !mDoneVideoSeeking,
               "Seek shouldn't be finished");
    MOZ_ASSERT(aVideo);

    AdjustFastSeekIfNeeded(aVideo);

    if (mSeekJob.mTarget->IsFast()) {
      // Non-precise seek. We can stop the seek at the first sample.
      mMaster->PushVideo(aVideo);
      mDoneVideoSeeking = true;
    } else {
      nsresult rv = DropVideoUpToSeekTarget(aVideo);
      if (NS_FAILED(rv)) {
        mMaster->DecodeError(rv);
        return;
      }
    }

    if (!mDoneVideoSeeking) {
      RequestVideoData();
      return;
    }
    MaybeFinishSeek();
  }

  void HandleWaitingForAudio() override
  {
    MOZ_ASSERT(!mDoneAudioSeeking);
    mMaster->WaitForData(MediaData::AUDIO_DATA);
  }

  void HandleAudioCanceled() override
  {
    MOZ_ASSERT(!mDoneAudioSeeking);
    RequestAudioData();
  }

  void HandleEndOfAudio() override
  {
    MOZ_ASSERT(!mDoneAudioSeeking);
    AudioQueue().Finish();
    mDoneAudioSeeking = true;
    MaybeFinishSeek();
  }

  void HandleWaitingForVideo() override
  {
    MOZ_ASSERT(!mDoneVideoSeeking);
    mMaster->WaitForData(MediaData::VIDEO_DATA);
  }

  void HandleVideoCanceled() override
  {
    MOZ_ASSERT(!mDoneVideoSeeking);
    RequestVideoData();
  }

  void HandleEndOfVideo() override
  {
    MOZ_ASSERT(!mDoneVideoSeeking);
    if (mFirstVideoFrameAfterSeek) {
      // Hit the end of stream. Move mFirstVideoFrameAfterSeek into
      // mSeekedVideoData so we have something to display after seeking.
      mMaster->PushVideo(mFirstVideoFrameAfterSeek);
    }
    VideoQueue().Finish();
    mDoneVideoSeeking = true;
    MaybeFinishSeek();
  }

  void HandleAudioWaited(MediaData::Type aType) override
  {
    MOZ_ASSERT(!mDoneAudioSeeking || !mDoneVideoSeeking,
               "Seek shouldn't be finished");

    RequestAudioData();
  }

  void HandleVideoWaited(MediaData::Type aType) override
  {
    MOZ_ASSERT(!mDoneAudioSeeking || !mDoneVideoSeeking,
               "Seek shouldn't be finished");

    RequestVideoData();
  }

  void DoSeek() override
  {
    mDoneAudioSeeking = !Info().HasAudio();
    mDoneVideoSeeking = !Info().HasVideo();

    mMaster->ResetDecode();
    mMaster->StopMediaSink();

    DemuxerSeek();
  }

  TimeUnit CalculateNewCurrentTime() const override
  {
    const auto seekTime = mSeekJob.mTarget->GetTime();

    // For the accurate seek, we always set the newCurrentTime = seekTime so
    // that the updated HTMLMediaElement.currentTime will always be the seek
    // target; we rely on the MediaSink to handle the gap between the
    // newCurrentTime and the real decoded samples' start time.
    if (mSeekJob.mTarget->IsAccurate()) {
      return seekTime;
    }

    // For the fast seek, we update the newCurrentTime with the decoded audio
    // and video samples, setting it to be the one which is closest to the
    // seekTime.
    if (mSeekJob.mTarget->IsFast()) {
      RefPtr<AudioData> audio = AudioQueue().PeekFront();
      RefPtr<VideoData> video = VideoQueue().PeekFront();

      // A situation where both audio and video approach the end.
      if (!audio && !video) {
        return seekTime;
      }

      const int64_t audioStart =
        audio ? audio->mTime.ToMicroseconds() : INT64_MAX;
      const int64_t videoStart =
        video ? video->mTime.ToMicroseconds() : INT64_MAX;
      const int64_t audioGap = std::abs(audioStart - seekTime.ToMicroseconds());
      const int64_t videoGap = std::abs(videoStart - seekTime.ToMicroseconds());
      return TimeUnit::FromMicroseconds(
        audioGap <= videoGap ? audioStart : videoStart);
    }

    MOZ_ASSERT(false, "AccurateSeekTask doesn't handle other seek types.");
    return TimeUnit::Zero();
  }
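
  // Worked example (illustrative): for a fast seek to 5.0s where the first
  // decoded audio sample starts at 4.8s and the first video sample at 5.3s,
  // audioGap (0.2s) <= videoGap (0.3s), so the new current time becomes 4.8s.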

protected:
  void DemuxerSeek()
  {
    // Request the demuxer to perform seek.
    Reader()->Seek(mSeekJob.mTarget.ref())
      ->Then(OwnerThread(), __func__,
             [this] (const media::TimeUnit& aUnit) {
               OnSeekResolved(aUnit);
             },
             [this] (const SeekRejectValue& aReject) {
               OnSeekRejected(aReject);
             })
      ->Track(mSeekRequest);
  }

  void OnSeekResolved(media::TimeUnit)
  {
    mSeekRequest.Complete();

    // We must decode the first samples of active streams, so we can determine
    // the new stream time. So dispatch tasks to do that.
    if (!mDoneVideoSeeking) {
      RequestVideoData();
    }
    if (!mDoneAudioSeeking) {
      RequestAudioData();
    }
  }

  void OnSeekRejected(const SeekRejectValue& aReject)
  {
    mSeekRequest.Complete();

    if (aReject.mError == NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA) {
      SLOG("OnSeekRejected reason=WAITING_FOR_DATA type=%d", aReject.mType);
      // The reader rejects the seek with the type (audio or video) that is
      // waiting for data, so only assert on the stream named by aReject.mType.
      MOZ_ASSERT_IF(aReject.mType == MediaData::AUDIO_DATA, !mMaster->IsRequestingAudioData());
      MOZ_ASSERT_IF(aReject.mType == MediaData::VIDEO_DATA, !mMaster->IsRequestingVideoData());
      MOZ_ASSERT_IF(aReject.mType == MediaData::AUDIO_DATA, !mMaster->IsWaitingAudioData());
      MOZ_ASSERT_IF(aReject.mType == MediaData::VIDEO_DATA, !mMaster->IsWaitingVideoData());

      // Fire 'waiting' to notify the player that we are waiting for data.
      mMaster->UpdateNextFrameStatus(
        MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_SEEKING);
      Reader()
        ->WaitForData(aReject.mType)
        ->Then(OwnerThread(), __func__,
               [this](MediaData::Type aType) {
                 SLOG("OnSeekRejected wait promise resolved");
                 mWaitRequest.Complete();
                 DemuxerSeek();
               },
               [this](const WaitForDataRejectValue& aRejection) {
                 SLOG("OnSeekRejected wait promise rejected");
                 mWaitRequest.Complete();
                 mMaster->DecodeError(NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA);
               })
        ->Track(mWaitRequest);
      return;
    }

    if (aReject.mError == NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
      HandleEndOfAudio();
      HandleEndOfVideo();
      return;
    }

    MOZ_ASSERT(NS_FAILED(aReject.mError),
               "Cancels should also disconnect mSeekRequest");
    mMaster->DecodeError(aReject.mError);
  }
2016-12-09 23:34:40 +03:00
|
|
|
void RequestAudioData()
|
|
|
|
{
|
2016-12-10 03:33:54 +03:00
|
|
|
MOZ_ASSERT(!mDoneAudioSeeking);
|
2016-12-20 07:54:23 +03:00
|
|
|
mMaster->RequestAudioData();
|
2016-12-09 23:34:40 +03:00
|
|
|
}
|
|
|
|
|
2017-07-11 04:59:13 +03:00
|
|
|
virtual void RequestVideoData()
|
2016-12-09 23:34:40 +03:00
|
|
|
{
|
2016-12-10 03:33:54 +03:00
|
|
|
MOZ_ASSERT(!mDoneVideoSeeking);
|
2017-06-12 11:06:11 +03:00
|
|
|
mMaster->RequestVideoData(media::TimeUnit());
|
2016-12-09 23:34:40 +03:00
|
|
|
}
|
|
|
|
|
2016-12-10 01:34:44 +03:00
|
|
|
void AdjustFastSeekIfNeeded(MediaData* aSample)
|
|
|
|
{
|
2017-09-04 12:27:43 +03:00
|
|
|
if (mSeekJob.mTarget->IsFast() &&
|
|
|
|
mSeekJob.mTarget->GetTime() > mCurrentTimeBeforeSeek &&
|
|
|
|
aSample->mTime < mCurrentTimeBeforeSeek) {
|
2016-12-10 01:34:44 +03:00
|
|
|
// We are doing a fastSeek, but we ended up *before* the previous
|
|
|
|
// playback position. This is surprising UX, so switch to an accurate
|
|
|
|
// seek and decode to the seek target. This is not conformant to the
|
|
|
|
// spec, fastSeek should always be fast, but until we get the time to
|
|
|
|
// change all Readers to seek to the keyframe after the currentTime
|
|
|
|
// in this case, we'll just decode forward. Bug 1026330.
|
2016-12-19 07:25:14 +03:00
|
|
|
mSeekJob.mTarget->SetType(SeekTarget::Accurate);
|
2016-12-10 01:34:44 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-12-16 10:11:19 +03:00
|
|
|
nsresult DropAudioUpToSeekTarget(AudioData* aAudio)
|
2016-12-10 02:20:10 +03:00
|
|
|
{
|
2016-12-19 07:25:14 +03:00
|
|
|
MOZ_ASSERT(aAudio && mSeekJob.mTarget->IsAccurate());
|
2016-12-10 02:20:10 +03:00
|
|
|
|
2017-03-29 09:46:15 +03:00
|
|
|
auto sampleDuration = FramesToTimeUnit(
|
|
|
|
aAudio->mFrames, Info().mAudio.mRate);
|
|
|
|
if (!sampleDuration.IsValid()) {
|
2016-12-10 02:20:10 +03:00
|
|
|
return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
|
|
|
|
}
|
|
|
|
|
2017-04-14 12:13:36 +03:00
|
|
|
auto audioTime = aAudio->mTime;
|
2017-03-29 09:46:15 +03:00
|
|
|
if (audioTime + sampleDuration <= mSeekJob.mTarget->GetTime()) {
|
2016-12-10 02:20:10 +03:00
|
|
|
// Our seek target lies after the frames in this AudioData. Don't
|
|
|
|
// push it onto the audio queue, and keep decoding forwards.
|
|
|
|
return NS_OK;
|
|
|
|
}
|
|
|
|
|
2017-03-29 09:46:15 +03:00
|
|
|
if (audioTime > mSeekJob.mTarget->GetTime()) {
|
2016-12-10 02:20:10 +03:00
|
|
|
// The seek target doesn't lie in the audio block just after the last
|
|
|
|
// audio frames we've seen which were before the seek target. This
|
|
|
|
// could have been the first audio data we've seen after seek, i.e. the
|
|
|
|
// seek terminated after the seek target in the audio stream. Just
|
|
|
|
// abort the audio decode-to-target, the state machine will play
|
|
|
|
// silence to cover the gap. Typically this happens in poorly muxed
|
|
|
|
// files.
|
2017-03-24 07:42:49 +03:00
|
|
|
SLOGW("Audio not synced after seek, maybe a poorly muxed file?");
|
2016-12-21 10:51:45 +03:00
|
|
|
mMaster->PushAudio(aAudio);
|
2016-12-10 03:33:54 +03:00
|
|
|
mDoneAudioSeeking = true;
|
2016-12-10 02:20:10 +03:00
|
|
|
return NS_OK;
|
|
|
|
}
|
|
|
|
|
|
|
|
// The seek target lies somewhere in this AudioData's frames, strip off
|
|
|
|
// any frames which lie before the seek target, so we'll begin playback
|
|
|
|
// exactly at the seek target.
|
2017-03-29 09:46:15 +03:00
|
|
|
NS_ASSERTION(mSeekJob.mTarget->GetTime() >= audioTime,
|
2016-12-10 02:20:10 +03:00
|
|
|
"Target must at or be after data start.");
|
2017-03-29 09:46:15 +03:00
|
|
|
NS_ASSERTION(mSeekJob.mTarget->GetTime() < audioTime + sampleDuration,
|
2016-12-10 02:20:10 +03:00
|
|
|
"Data must end after target.");
|
|
|
|
|
2017-03-29 09:46:15 +03:00
|
|
|
CheckedInt64 framesToPrune = TimeUnitToFrames(
|
|
|
|
mSeekJob.mTarget->GetTime() - audioTime, Info().mAudio.mRate);
|
2016-12-10 02:20:10 +03:00
|
|
|
if (!framesToPrune.isValid()) {
|
|
|
|
return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
|
|
|
|
}
|
2016-12-16 10:11:19 +03:00
|
|
|
if (framesToPrune.value() > aAudio->mFrames) {
|
2016-12-10 02:20:10 +03:00
|
|
|
// We've messed up somehow. Don't try to trim frames, the |frames|
|
|
|
|
// variable below will overflow.
|
2017-03-24 07:42:49 +03:00
|
|
|
SLOGW("Can't prune more frames that we have!");
|
2016-12-10 02:20:10 +03:00
|
|
|
return NS_ERROR_FAILURE;
|
|
|
|
}
|
2017-03-29 09:46:15 +03:00
|
|
|
uint32_t frames = aAudio->mFrames - uint32_t(framesToPrune.value());
|
2016-12-16 10:11:19 +03:00
|
|
|
uint32_t channels = aAudio->mChannels;
|
2016-12-10 02:20:10 +03:00
|
|
|
AlignedAudioBuffer audioData(frames * channels);
|
|
|
|
if (!audioData) {
|
|
|
|
return NS_ERROR_OUT_OF_MEMORY;
|
|
|
|
}
|
|
|
|
|
|
|
|
memcpy(audioData.get(),
|
2016-12-16 10:11:19 +03:00
|
|
|
aAudio->mAudioData.get() + (framesToPrune.value() * channels),
|
2016-12-10 02:20:10 +03:00
|
|
|
frames * channels * sizeof(AudioDataValue));
|
2017-03-29 09:46:15 +03:00
|
|
|
auto duration = FramesToTimeUnit(frames, Info().mAudio.mRate);
|
|
|
|
if (!duration.IsValid()) {
|
2016-12-10 02:20:10 +03:00
|
|
|
return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
|
|
|
|
}
|
2017-01-27 15:20:37 +03:00
|
|
|
RefPtr<AudioData> data(new AudioData(
|
2017-04-24 12:33:05 +03:00
|
|
|
aAudio->mOffset, mSeekJob.mTarget->GetTime(),
|
|
|
|
duration, frames, Move(audioData), channels,
|
2017-03-29 09:46:15 +03:00
|
|
|
aAudio->mRate));
|
2017-01-27 15:20:37 +03:00
|
|
|
MOZ_ASSERT(AudioQueue().GetSize() == 0,
|
|
|
|
"Should be the 1st sample after seeking");
|
2016-12-21 10:51:45 +03:00
|
|
|
mMaster->PushAudio(data);
|
2016-12-10 03:33:54 +03:00
|
|
|
mDoneAudioSeeking = true;
|
2016-12-10 02:20:10 +03:00
|
|
|
|
|
|
|
return NS_OK;
|
|
|
|
}
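// Illustrative example (hypothetical numbers, not from any particular
// stream): for 48 kHz audio, a packet of 1024 frames starting at 1.000s spans
// roughly [1.000s, 1.0213s]. Seeking to 1.010s gives
// framesToPrune = TimeUnitToFrames(0.010s, 48000) = 480, so the remaining
// 544 frames are kept and the new AudioData is stamped with the seek target
// as its start time.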
|
|
|
|
|
2017-03-22 06:59:54 +03:00
|
|
|
nsresult DropVideoUpToSeekTarget(VideoData* aVideo)
|
2016-12-10 02:20:10 +03:00
|
|
|
{
|
2017-03-22 06:59:54 +03:00
|
|
|
MOZ_ASSERT(aVideo);
|
2016-12-16 06:16:31 +03:00
|
|
|
SLOG("DropVideoUpToSeekTarget() frame [%" PRId64 ", %" PRId64 "]",
|
2017-04-14 12:13:36 +03:00
|
|
|
aVideo->mTime.ToMicroseconds(), aVideo->GetEndTime().ToMicroseconds());
|
2017-07-11 04:56:09 +03:00
|
|
|
const auto target = GetSeekTarget();
|
2016-12-10 02:20:10 +03:00
|
|
|
|
|
|
|
// If the frame end time is at or before the seek target, we won't want
|
|
|
|
// to display this frame after the seek, so discard it.
|
2017-03-22 06:59:54 +03:00
|
|
|
if (target >= aVideo->GetEndTime()) {
|
2017-05-22 10:33:18 +03:00
|
|
|
SLOG("DropVideoUpToSeekTarget() pop video frame [%" PRId64 ", %" PRId64
|
|
|
|
"] target=%" PRId64,
|
|
|
|
aVideo->mTime.ToMicroseconds(),
|
|
|
|
aVideo->GetEndTime().ToMicroseconds(),
|
2017-04-14 09:14:08 +03:00
|
|
|
target.ToMicroseconds());
|
2017-03-22 06:59:54 +03:00
|
|
|
mFirstVideoFrameAfterSeek = aVideo;
|
2016-12-10 02:20:10 +03:00
|
|
|
} else {
|
2017-04-14 12:13:36 +03:00
|
|
|
if (target >= aVideo->mTime &&
|
2017-04-14 09:14:08 +03:00
|
|
|
aVideo->GetEndTime() >= target) {
|
2017-01-27 15:20:37 +03:00
|
|
|
// The seek target lies inside this frame's time slice. Adjust the
|
|
|
|
// frame's start time to match the seek target.
|
2017-04-14 09:17:04 +03:00
|
|
|
aVideo->UpdateTimestamp(target);
|
2016-12-10 02:20:10 +03:00
|
|
|
}
|
2016-12-10 03:33:54 +03:00
|
|
|
mFirstVideoFrameAfterSeek = nullptr;
|
2016-12-10 02:20:10 +03:00
|
|
|
|
2017-05-22 10:33:18 +03:00
|
|
|
SLOG("DropVideoUpToSeekTarget() found video frame [%" PRId64 ", %" PRId64
|
|
|
|
"] containing target=%" PRId64,
|
|
|
|
aVideo->mTime.ToMicroseconds(),
|
|
|
|
aVideo->GetEndTime().ToMicroseconds(),
|
2017-04-14 09:14:08 +03:00
|
|
|
target.ToMicroseconds());
|
2016-12-10 02:20:10 +03:00
|
|
|
|
2017-01-27 15:20:37 +03:00
|
|
|
MOZ_ASSERT(VideoQueue().GetSize() == 0,
|
|
|
|
"Should be the 1st sample after seeking");
|
2017-03-22 06:59:54 +03:00
|
|
|
mMaster->PushVideo(aVideo);
|
2016-12-10 03:33:54 +03:00
|
|
|
mDoneVideoSeeking = true;
|
2016-12-10 02:20:10 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
return NS_OK;
|
|
|
|
}
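// Descriptive note: frames that end before the target are not queued but are
// remembered in mFirstVideoFrameAfterSeek so we still have something to show
// if decoding hits end of stream, while the frame that contains the target
// has its start time clamped to the target and becomes the first queued frame.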
|
|
|
|
|
2016-12-10 02:25:08 +03:00
|
|
|
void MaybeFinishSeek()
|
|
|
|
{
|
2016-12-10 03:33:54 +03:00
|
|
|
if (mDoneAudioSeeking && mDoneVideoSeeking) {
|
2016-12-16 11:09:35 +03:00
|
|
|
SeekCompleted();
|
2016-12-10 02:25:08 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-12-09 06:40:03 +03:00
|
|
|
/*
|
|
|
|
* Track the current seek promise made by the reader.
|
|
|
|
*/
|
2017-07-19 12:34:05 +03:00
|
|
|
MozPromiseRequestHolder<MediaFormatReader::SeekPromise> mSeekRequest;
|
2016-12-09 06:40:03 +03:00
|
|
|
|
2016-12-10 03:33:54 +03:00
|
|
|
/*
|
|
|
|
* Internal state.
|
|
|
|
*/
|
|
|
|
media::TimeUnit mCurrentTimeBeforeSeek;
|
|
|
|
bool mDoneAudioSeeking = false;
|
|
|
|
bool mDoneVideoSeeking = false;
|
2016-12-16 12:30:28 +03:00
|
|
|
MozPromiseRequestHolder<WaitForDataPromise> mWaitRequest;
|
2016-12-10 03:33:54 +03:00
|
|
|
|
|
|
|
// This temporarily stores the first frame we decode after we seek.
|
|
|
|
// This is so that if we hit end of stream while we're decoding to reach
|
|
|
|
// the seek target, we will still have a frame that we can display as the
|
|
|
|
// last frame in the media.
|
2017-03-22 06:59:54 +03:00
|
|
|
RefPtr<VideoData> mFirstVideoFrameAfterSeek;
|
2017-07-11 04:56:09 +03:00
|
|
|
|
|
|
|
private:
|
|
|
|
virtual media::TimeUnit GetSeekTarget() const
|
|
|
|
{
|
|
|
|
return mSeekJob.mTarget->GetTime();
|
|
|
|
}
|
2016-11-30 05:32:29 +03:00
|
|
|
};
|
|
|
|
|
2016-12-14 05:49:24 +03:00
|
|
|
/*
|
|
|
|
* Remove samples from the queue until aCompare() returns false.
|
|
|
|
* aCompare A function object with the signature bool(int64_t) which returns
|
|
|
|
* true for samples that should be removed.
|
|
|
|
*/
|
2017-03-22 06:59:54 +03:00
|
|
|
template <typename Type, typename Function>
|
|
|
|
static void
|
|
|
|
DiscardFrames(MediaQueue<Type>& aQueue, const Function& aCompare)
|
2016-12-14 05:49:24 +03:00
|
|
|
{
|
|
|
|
while (aQueue.GetSize() > 0) {
|
2017-04-14 12:13:36 +03:00
|
|
|
if (aCompare(aQueue.PeekFront()->mTime.ToMicroseconds())) {
|
2017-03-22 06:59:54 +03:00
|
|
|
RefPtr<Type> releaseMe = aQueue.PopFront();
|
2016-12-14 05:49:24 +03:00
|
|
|
continue;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
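// Usage sketch (mirrors how DiscardFrames() is called further below): drop
// every queued frame at or before a given time, e.g.
//   DiscardFrames(VideoQueue(), [currentTime] (int64_t aSampleTime) {
//     return aSampleTime <= currentTime.ToMicroseconds();
//   });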
|
|
|
|
|
2016-11-30 05:32:29 +03:00
|
|
|
class MediaDecoderStateMachine::NextFrameSeekingState
|
|
|
|
: public MediaDecoderStateMachine::SeekingState
|
|
|
|
{
|
|
|
|
public:
|
2017-05-22 10:33:18 +03:00
|
|
|
explicit NextFrameSeekingState(Master* aPtr) : SeekingState(aPtr) { }
|
2016-11-30 05:32:29 +03:00
|
|
|
|
2017-02-13 20:07:40 +03:00
|
|
|
RefPtr<MediaDecoder::SeekPromise> Enter(SeekJob&& aSeekJob,
|
2016-11-30 05:32:29 +03:00
|
|
|
EventVisibility aVisibility)
|
|
|
|
{
|
2016-12-19 07:25:14 +03:00
|
|
|
MOZ_ASSERT(aSeekJob.mTarget->IsNextFrame());
|
2017-03-28 11:38:26 +03:00
|
|
|
mCurrentTime = mMaster->GetMediaTime();
|
2016-12-19 08:13:32 +03:00
|
|
|
mDuration = mMaster->Duration();
|
2016-11-30 05:32:29 +03:00
|
|
|
return SeekingState::Enter(Move(aSeekJob), aVisibility);
|
|
|
|
}
|
2016-11-29 12:42:57 +03:00
|
|
|
|
2016-12-15 12:25:44 +03:00
|
|
|
void Exit() override
|
|
|
|
{
|
|
|
|
// Disconnect my async seek operation.
|
2017-05-25 07:06:34 +03:00
|
|
|
if (mAsyncSeekTask) { mAsyncSeekTask->Cancel(); }
|
2016-12-15 12:25:44 +03:00
|
|
|
|
|
|
|
// Disconnect MediaDecoder.
|
|
|
|
mSeekJob.RejectIfExists(__func__);
|
|
|
|
}
|
|
|
|
|
2017-03-22 06:59:54 +03:00
|
|
|
void HandleAudioDecoded(AudioData* aAudio) override
|
2016-12-14 09:51:17 +03:00
|
|
|
{
|
2016-12-21 10:56:18 +03:00
|
|
|
mMaster->PushAudio(aAudio);
|
2016-12-14 09:51:17 +03:00
|
|
|
}
|
|
|
|
|
2017-03-22 06:59:54 +03:00
|
|
|
void HandleVideoDecoded(VideoData* aVideo, TimeStamp aDecodeStart) override
|
2016-12-14 09:51:17 +03:00
|
|
|
{
|
|
|
|
MOZ_ASSERT(aVideo);
|
2016-12-15 12:25:44 +03:00
|
|
|
MOZ_ASSERT(!mSeekJob.mPromise.IsEmpty(), "Seek shouldn't be finished");
|
2016-12-19 12:18:03 +03:00
|
|
|
MOZ_ASSERT(NeedMoreVideo());
|
2016-12-14 09:51:17 +03:00
|
|
|
|
2017-04-14 12:13:36 +03:00
|
|
|
if (aVideo->mTime > mCurrentTime) {
|
2016-12-21 10:56:18 +03:00
|
|
|
mMaster->PushVideo(aVideo);
|
2016-12-19 12:29:01 +03:00
|
|
|
FinishSeek();
|
2016-12-19 12:18:03 +03:00
|
|
|
} else {
|
2016-12-14 10:04:00 +03:00
|
|
|
RequestVideoData();
|
2016-12-14 09:51:17 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-01-06 12:43:05 +03:00
|
|
|
void HandleWaitingForAudio() override
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(!mSeekJob.mPromise.IsEmpty(), "Seek shouldn't be finished");
|
|
|
|
// We don't care about audio decode errors in this state which will be
|
|
|
|
// handled by other states after seeking.
|
|
|
|
}
|
|
|
|
|
2017-01-06 12:49:11 +03:00
|
|
|
void HandleAudioCanceled() override
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(!mSeekJob.mPromise.IsEmpty(), "Seek shouldn't be finished");
|
|
|
|
// We don't care about audio decode errors in this state which will be
|
|
|
|
// handled by other states after seeking.
|
|
|
|
}
|
|
|
|
|
2017-01-06 12:54:59 +03:00
|
|
|
void HandleEndOfAudio() override
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(!mSeekJob.mPromise.IsEmpty(), "Seek shouldn't be finished");
|
|
|
|
// We don't care about audio decode errors in this state which will be
|
|
|
|
// handled by other states after seeking.
|
|
|
|
}
|
|
|
|
|
2017-01-09 07:59:32 +03:00
|
|
|
void HandleWaitingForVideo() override
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(!mSeekJob.mPromise.IsEmpty(), "Seek shouldn't be finished");
|
|
|
|
MOZ_ASSERT(NeedMoreVideo());
|
|
|
|
mMaster->WaitForData(MediaData::VIDEO_DATA);
|
|
|
|
}
|
|
|
|
|
|
|
|
void HandleVideoCanceled() override
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(!mSeekJob.mPromise.IsEmpty(), "Seek shouldn't be finished");
|
|
|
|
MOZ_ASSERT(NeedMoreVideo());
|
|
|
|
RequestVideoData();
|
|
|
|
}
|
|
|
|
|
|
|
|
void HandleEndOfVideo() override
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(!mSeekJob.mPromise.IsEmpty(), "Seek shouldn't be finished");
|
|
|
|
MOZ_ASSERT(NeedMoreVideo());
|
|
|
|
VideoQueue().Finish();
|
|
|
|
FinishSeek();
|
|
|
|
}
|
2016-12-14 09:51:17 +03:00
|
|
|
|
|
|
|
void HandleAudioWaited(MediaData::Type aType) override
|
|
|
|
{
|
2016-12-19 11:24:30 +03:00
|
|
|
// We don't care about audio in this state.
|
2016-12-14 09:51:17 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
void HandleVideoWaited(MediaData::Type aType) override
|
|
|
|
{
|
2016-12-15 12:25:44 +03:00
|
|
|
MOZ_ASSERT(!mSeekJob.mPromise.IsEmpty(), "Seek shouldn't be finished");
|
2016-12-19 12:18:03 +03:00
|
|
|
MOZ_ASSERT(NeedMoreVideo());
|
|
|
|
RequestVideoData();
|
2016-12-14 09:51:17 +03:00
|
|
|
}
|
|
|
|
|
2017-03-28 11:44:40 +03:00
|
|
|
TimeUnit CalculateNewCurrentTime() const override
|
2016-12-10 02:35:11 +03:00
|
|
|
{
|
2016-12-14 06:21:24 +03:00
|
|
|
// The HTMLMediaElement.currentTime should be updated to the seek target
|
|
|
|
// which has been updated to the next frame's time.
|
2017-03-28 11:44:40 +03:00
|
|
|
return mSeekJob.mTarget->GetTime();
|
2016-12-10 02:35:11 +03:00
|
|
|
}
|
|
|
|
|
2017-05-24 09:05:29 +03:00
|
|
|
void DoSeek() override
|
|
|
|
{
|
2017-05-25 07:06:34 +03:00
|
|
|
auto currentTime = mCurrentTime;
|
|
|
|
DiscardFrames(VideoQueue(), [currentTime] (int64_t aSampleTime) {
|
|
|
|
return aSampleTime <= currentTime.ToMicroseconds();
|
|
|
|
});
|
|
|
|
|
|
|
|
// If there is a pending video request, finish the seeking if we don't need
|
|
|
|
// more data, or wait for HandleVideoDecoded() to finish seeking.
|
|
|
|
if (mMaster->IsRequestingVideoData()) {
|
|
|
|
if (!NeedMoreVideo()) {
|
|
|
|
FinishSeek();
|
|
|
|
}
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Otherwise, we need to do the seek operation asynchronously for a special
|
2017-05-24 09:05:29 +03:00
|
|
|
// case (bug504613.ogv) which has no data at all, so the 1st seekToNextFrame()
|
|
|
|
// operation reaches the end of the media. If we did the seek operation
|
|
|
|
// synchronously, we immediately resolve the SeekPromise in mSeekJob and
|
|
|
|
// then switch to the CompletedState which dispatches an "ended" event.
|
|
|
|
// However, the ThenValue of the SeekPromise has not yet been set, so the
|
|
|
|
// promise resolving is postponed and then the JS developer receives the
|
|
|
|
// "ended" event before the seek promise is resolved.
|
|
|
|
// An asynchronous seek operation helps to solve this issue since while the
|
|
|
|
// seek is actually performed, the ThenValue of SeekPromise has already
|
|
|
|
// been set so that it won't be postponed.
|
|
|
|
RefPtr<Runnable> r = mAsyncSeekTask = new AsyncNextFrameSeekTask(this);
|
|
|
|
OwnerThread()->Dispatch(r.forget());
|
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
|
|
|
void DoSeekInternal()
|
|
|
|
{
|
2017-05-25 07:06:34 +03:00
|
|
|
// We don't need to discard frames to the mCurrentTime here because we have
|
|
|
|
// already done it in DoSeek(), and any video data received in between either
|
|
|
|
// finishes the seek operation or is discarded; see HandleVideoDecoded().
|
2017-05-24 09:05:29 +03:00
|
|
|
|
|
|
|
if (!NeedMoreVideo()) {
|
|
|
|
FinishSeek();
|
2017-09-04 12:27:43 +03:00
|
|
|
} else if (!mMaster->IsRequestingVideoData() &&
|
|
|
|
!mMaster->IsWaitingVideoData()) {
|
2017-05-24 09:05:29 +03:00
|
|
|
RequestVideoData();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
class AsyncNextFrameSeekTask : public Runnable
|
|
|
|
{
|
|
|
|
public:
|
|
|
|
explicit AsyncNextFrameSeekTask(NextFrameSeekingState* aStateObject)
|
2017-06-12 22:34:10 +03:00
|
|
|
: Runnable("MediaDecoderStateMachine::NextFrameSeekingState::"
|
|
|
|
"AysncNextFrameSeekTask")
|
|
|
|
, mStateObj(aStateObject)
|
2017-05-24 09:05:29 +03:00
|
|
|
{
|
|
|
|
}
|
|
|
|
|
|
|
|
void Cancel() { mStateObj = nullptr; }
|
|
|
|
|
|
|
|
NS_IMETHOD Run() override
|
|
|
|
{
|
|
|
|
if (mStateObj) {
|
|
|
|
mStateObj->DoSeekInternal();
|
|
|
|
}
|
|
|
|
return NS_OK;
|
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
|
|
|
NextFrameSeekingState* mStateObj;
|
|
|
|
};
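// Design note: the task holds a raw pointer back to the state object rather
// than a strong reference; NextFrameSeekingState::Exit() calls Cancel() on
// the pending task, so a task that outlives the state simply runs as a no-op
// instead of touching a dead object.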
|
|
|
|
|
2016-12-14 10:04:00 +03:00
|
|
|
void RequestVideoData()
|
|
|
|
{
|
2017-06-12 11:06:11 +03:00
|
|
|
mMaster->RequestVideoData(media::TimeUnit());
|
2016-12-14 10:04:00 +03:00
|
|
|
}
|
|
|
|
|
2016-12-14 10:15:40 +03:00
|
|
|
bool NeedMoreVideo() const
|
|
|
|
{
|
|
|
|
// Need to request video when we have none and video queue is not finished.
|
2017-09-04 12:27:43 +03:00
|
|
|
return VideoQueue().GetSize() == 0 && !VideoQueue().IsFinished();
|
2016-12-14 10:15:40 +03:00
|
|
|
}
|
|
|
|
|
2016-12-14 10:28:23 +03:00
|
|
|
// Update the seek target's time before resolving this seek task; the updated
|
2017-01-27 15:20:37 +03:00
|
|
|
// time will be used in MDSM::SeekCompleted() to update the MDSM's
|
|
|
|
// position.
|
2016-12-14 10:28:23 +03:00
|
|
|
void UpdateSeekTargetTime()
|
|
|
|
{
|
2017-03-22 06:59:54 +03:00
|
|
|
RefPtr<VideoData> data = VideoQueue().PeekFront();
|
2016-12-14 10:28:23 +03:00
|
|
|
if (data) {
|
2017-04-14 12:13:36 +03:00
|
|
|
mSeekJob.mTarget->SetTime(data->mTime);
|
2016-12-14 10:28:23 +03:00
|
|
|
} else {
|
2016-12-19 12:40:10 +03:00
|
|
|
MOZ_ASSERT(VideoQueue().AtEndOfStream());
|
|
|
|
mSeekJob.mTarget->SetTime(mDuration);
|
2016-12-14 10:28:23 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-12-19 12:29:01 +03:00
|
|
|
void FinishSeek()
|
2016-12-14 09:57:44 +03:00
|
|
|
{
|
2016-12-19 12:29:01 +03:00
|
|
|
MOZ_ASSERT(!NeedMoreVideo());
|
|
|
|
UpdateSeekTargetTime();
|
|
|
|
auto time = mSeekJob.mTarget->GetTime().ToMicroseconds();
|
|
|
|
DiscardFrames(AudioQueue(), [time] (int64_t aSampleTime) {
|
|
|
|
return aSampleTime < time;
|
|
|
|
});
|
|
|
|
SeekCompleted();
|
2016-12-14 09:57:44 +03:00
|
|
|
}
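// E.g. (illustrative): if the first frame remaining in the queue starts at
// 5.040s, the target (and hence HTMLMediaElement.currentTime) becomes 5.040s;
// if instead the queue hit end of stream, the target becomes the media
// duration. Audio samples strictly before the updated target are discarded.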
|
|
|
|
|
2016-12-14 11:27:03 +03:00
|
|
|
/*
|
|
|
|
* Internal state.
|
|
|
|
*/
|
2017-03-28 11:38:26 +03:00
|
|
|
TimeUnit mCurrentTime;
|
|
|
|
TimeUnit mDuration;
|
2016-12-15 12:25:44 +03:00
|
|
|
RefPtr<AsyncNextFrameSeekTask> mAsyncSeekTask;
|
2016-11-29 13:37:27 +03:00
|
|
|
};
|
2016-11-30 05:32:29 +03:00
|
|
|
|
2017-05-24 10:11:10 +03:00
|
|
|
class MediaDecoderStateMachine::NextFrameSeekingFromDormantState
|
|
|
|
: public MediaDecoderStateMachine::AccurateSeekingState
|
|
|
|
{
|
|
|
|
public:
|
|
|
|
explicit NextFrameSeekingFromDormantState(Master* aPtr)
|
|
|
|
: AccurateSeekingState(aPtr)
|
|
|
|
{
|
|
|
|
}
|
|
|
|
|
|
|
|
RefPtr<MediaDecoder::SeekPromise> Enter(SeekJob&& aCurrentSeekJob,
|
|
|
|
SeekJob&& aFutureSeekJob)
|
|
|
|
{
|
|
|
|
mFutureSeekJob = Move(aFutureSeekJob);
|
|
|
|
|
2017-05-25 10:09:10 +03:00
|
|
|
AccurateSeekingState::Enter(Move(aCurrentSeekJob),
|
2017-05-25 10:40:59 +03:00
|
|
|
EventVisibility::Suppressed);
|
|
|
|
|
2017-05-24 10:11:10 +03:00
|
|
|
return mFutureSeekJob.mPromise.Ensure(__func__);
|
|
|
|
}
|
|
|
|
|
|
|
|
void Exit() override
|
|
|
|
{
|
|
|
|
mFutureSeekJob.RejectIfExists(__func__);
|
|
|
|
AccurateSeekingState::Exit();
|
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
|
|
|
SeekJob mFutureSeekJob;
|
2017-05-25 10:09:10 +03:00
|
|
|
|
2017-05-25 10:40:59 +03:00
|
|
|
// We don't want to transition to DecodingState once this seek completes,
|
|
|
|
// instead, we transition to NextFrameSeekingState.
|
|
|
|
void GoToNextState() override
|
|
|
|
{
|
|
|
|
SetState<NextFrameSeekingState>(Move(mFutureSeekJob),
|
|
|
|
EventVisibility::Observable);
|
|
|
|
}
|
2017-05-24 10:11:10 +03:00
|
|
|
};
|
|
|
|
|
2017-07-03 10:32:39 +03:00
|
|
|
class MediaDecoderStateMachine::VideoOnlySeekingState
|
|
|
|
: public MediaDecoderStateMachine::AccurateSeekingState
|
|
|
|
{
|
|
|
|
public:
|
|
|
|
explicit VideoOnlySeekingState(Master* aPtr) : AccurateSeekingState(aPtr) { }
|
2017-07-06 06:37:51 +03:00
|
|
|
|
|
|
|
RefPtr<MediaDecoder::SeekPromise> Enter(SeekJob&& aSeekJob,
|
|
|
|
EventVisibility aVisibility)
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(aSeekJob.mTarget->IsVideoOnly());
|
|
|
|
MOZ_ASSERT(aVisibility == EventVisibility::Suppressed);
|
|
|
|
|
|
|
|
RefPtr<MediaDecoder::SeekPromise> p =
|
|
|
|
AccurateSeekingState::Enter(Move(aSeekJob), aVisibility);
|
|
|
|
|
|
|
|
// Dispatch a mozvideoonlyseekbegin event so the UI can make corresponding
|
|
|
|
// changes.
|
|
|
|
mMaster->mOnPlaybackEvent.Notify(MediaEventType::VideoOnlySeekBegin);
|
|
|
|
|
|
|
|
return p.forget();
|
|
|
|
}
|
|
|
|
|
2017-07-06 06:42:27 +03:00
|
|
|
void Exit() override
|
|
|
|
{
|
2017-07-04 12:49:43 +03:00
|
|
|
// We are completing or discarding this video-only seek operation now,
|
|
|
|
// dispatch an event so that the UI can change in response to the end
|
|
|
|
// of video-only seek.
|
|
|
|
mMaster->mOnPlaybackEvent.Notify(MediaEventType::VideoOnlySeekCompleted);
|
2017-07-06 06:42:27 +03:00
|
|
|
|
|
|
|
AccurateSeekingState::Exit();
|
|
|
|
}
|
2017-07-06 06:45:07 +03:00
|
|
|
|
|
|
|
void HandleAudioDecoded(AudioData* aAudio) override
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(mDoneAudioSeeking && !mDoneVideoSeeking,
|
|
|
|
"Seek shouldn't be finished");
|
|
|
|
MOZ_ASSERT(aAudio);
|
|
|
|
|
|
|
|
// Video-only seek doesn't reset the audio decoder. There might be pending audio
|
|
|
|
// requests when this seek begins. We will just store the
|
|
|
|
// data without checking |mDiscontinuity| or calling
|
|
|
|
// DropAudioUpToSeekTarget().
|
|
|
|
mMaster->PushAudio(aAudio);
|
|
|
|
}
|
2017-07-06 06:47:21 +03:00
|
|
|
|
|
|
|
void HandleWaitingForAudio() override { }
|
|
|
|
|
|
|
|
void HandleAudioCanceled() override { }
|
|
|
|
|
|
|
|
void HandleEndOfAudio() override { }
|
2017-07-06 06:48:42 +03:00
|
|
|
|
|
|
|
void HandleAudioWaited(MediaData::Type aType) override
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(!mDoneAudioSeeking || !mDoneVideoSeeking,
|
|
|
|
"Seek shouldn't be finished");
|
|
|
|
|
|
|
|
// Ignore pending requests from video-only seek.
|
|
|
|
}
|
2017-07-06 06:51:09 +03:00
|
|
|
|
|
|
|
void DoSeek() override
|
|
|
|
{
|
|
|
|
// TODO: keep decoding audio.
|
|
|
|
mDoneAudioSeeking = true;
|
|
|
|
mDoneVideoSeeking = !Info().HasVideo();
|
|
|
|
|
|
|
|
mMaster->ResetDecode(TrackInfo::kVideoTrack);
|
|
|
|
|
|
|
|
DemuxerSeek();
|
|
|
|
}
|
2017-07-11 04:56:09 +03:00
|
|
|
|
2017-07-11 04:59:13 +03:00
|
|
|
protected:
|
|
|
|
// Allow skip-to-next-key-frame to kick in if we fall behind the current
|
|
|
|
// playback position so decoding has a better chance to catch up.
|
|
|
|
void RequestVideoData() override
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(!mDoneVideoSeeking);
|
2017-07-12 12:03:45 +03:00
|
|
|
|
|
|
|
const auto& clock = mMaster->mMediaSink->IsStarted()
|
|
|
|
? mMaster->GetClock()
|
|
|
|
: mMaster->GetMediaTime();
|
|
|
|
const auto& nextKeyFrameTime = GetNextKeyFrameTime();
|
|
|
|
|
|
|
|
auto threshold = clock;
|
|
|
|
|
|
|
|
if (nextKeyFrameTime.IsValid() &&
|
|
|
|
clock >= (nextKeyFrameTime - sSkipToNextKeyFrameThreshold)) {
|
|
|
|
threshold = nextKeyFrameTime;
|
|
|
|
}
|
|
|
|
|
|
|
|
mMaster->RequestVideoData(threshold);
|
2017-07-11 04:59:13 +03:00
|
|
|
}
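// Worked example (hypothetical timestamps): if the playback clock is at
// 10.000s and the next keyframe (as reported by the first frame decoded after
// the seek) is at 10.004s, the keyframe lies within the 5ms
// sSkipToNextKeyFrameThreshold, so we pass 10.004s as the threshold and the
// decoder may skip straight to that keyframe instead of decoding the
// intervening delta frames.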
|
|
|
|
|
2017-07-11 04:56:09 +03:00
|
|
|
private:
|
2017-07-12 12:03:45 +03:00
|
|
|
// Trigger skip to next key frame if the current playback position is very
|
|
|
|
// close to the next key frame's time.
|
|
|
|
static constexpr TimeUnit sSkipToNextKeyFrameThreshold = TimeUnit::FromMicroseconds(5000);
|
|
|
|
|
2017-07-11 04:56:09 +03:00
|
|
|
// If the media is playing, drop video until we catch up to the playback position.
|
|
|
|
media::TimeUnit GetSeekTarget() const override
|
|
|
|
{
|
|
|
|
return mMaster->mMediaSink->IsStarted()
|
|
|
|
? mMaster->GetClock()
|
|
|
|
: mSeekJob.mTarget->GetTime();
|
|
|
|
}
|
2017-07-12 12:03:45 +03:00
|
|
|
|
|
|
|
media::TimeUnit GetNextKeyFrameTime() const
|
|
|
|
{
|
|
|
|
// We only call this method in RequestVideoData() and we only request video
|
|
|
|
// data if we haven't done video seeking.
|
|
|
|
MOZ_DIAGNOSTIC_ASSERT(!mDoneVideoSeeking);
|
|
|
|
MOZ_DIAGNOSTIC_ASSERT(mMaster->VideoQueue().GetSize() == 0);
|
|
|
|
|
|
|
|
if (mFirstVideoFrameAfterSeek) {
|
|
|
|
return mFirstVideoFrameAfterSeek->NextKeyFrameTime();
|
|
|
|
}
|
|
|
|
|
|
|
|
return TimeUnit::Invalid();
|
|
|
|
}
|
|
|
|
|
2017-07-03 10:32:39 +03:00
|
|
|
};
|
|
|
|
|
2017-07-12 12:03:45 +03:00
|
|
|
constexpr TimeUnit
|
|
|
|
MediaDecoderStateMachine::VideoOnlySeekingState::sSkipToNextKeyFrameThreshold;
|
|
|
|
|
2017-05-24 10:11:10 +03:00
|
|
|
RefPtr<MediaDecoder::SeekPromise>
|
|
|
|
MediaDecoderStateMachine::DormantState::HandleSeek(SeekTarget aTarget)
|
|
|
|
{
|
|
|
|
if (aTarget.IsNextFrame()) {
|
|
|
|
// NextFrameSeekingState doesn't reset the decoder unlike
|
|
|
|
// AccurateSeekingState. So we first must come out of dormant by seeking to
|
|
|
|
// mPendingSeek and continue later with the next-frame seek.
|
|
|
|
SLOG("Changed state to SEEKING (to %" PRId64 ")",
|
|
|
|
aTarget.GetTime().ToMicroseconds());
|
|
|
|
SeekJob seekJob;
|
|
|
|
seekJob.mTarget = Some(aTarget);
|
|
|
|
return StateObject::SetState<NextFrameSeekingFromDormantState>(
|
|
|
|
Move(mPendingSeek), Move(seekJob));
|
|
|
|
}
|
|
|
|
|
|
|
|
return StateObject::HandleSeek(aTarget);
|
|
|
|
}
|
|
|
|
|
2016-10-21 06:37:00 +03:00
|
|
|
/**
|
|
|
|
* Purpose: stop playback until enough data is decoded to continue playback.
|
|
|
|
*
|
|
|
|
* Transition to:
|
|
|
|
* SEEKING if any seek request.
|
|
|
|
* SHUTDOWN if any decode error.
|
|
|
|
* COMPLETED when having decoded all audio/video data.
|
|
|
|
* DECODING when having decoded enough data to continue playback.
|
|
|
|
*/
|
2016-09-06 06:30:30 +03:00
|
|
|
class MediaDecoderStateMachine::BufferingState
|
|
|
|
: public MediaDecoderStateMachine::StateObject
|
|
|
|
{
|
|
|
|
public:
|
2017-01-27 15:20:37 +03:00
|
|
|
explicit BufferingState(Master* aPtr) : StateObject(aPtr) { }
|
2016-09-06 06:30:30 +03:00
|
|
|
|
2016-10-12 09:16:40 +03:00
|
|
|
void Enter()
|
2016-09-06 06:30:30 +03:00
|
|
|
{
|
2016-09-06 12:05:48 +03:00
|
|
|
if (mMaster->IsPlaying()) {
|
|
|
|
mMaster->StopPlayback();
|
|
|
|
}
|
|
|
|
|
2016-09-06 12:22:29 +03:00
|
|
|
mBufferingStart = TimeStamp::Now();
|
2017-03-28 12:10:59 +03:00
|
|
|
mMaster->ScheduleStateMachineIn(TimeUnit::FromMicroseconds(USECS_PER_S));
|
2017-01-27 15:20:37 +03:00
|
|
|
mMaster->UpdateNextFrameStatus(
|
|
|
|
MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_BUFFERING);
|
2016-09-06 06:30:30 +03:00
|
|
|
}
|
|
|
|
|
2016-10-11 09:22:24 +03:00
|
|
|
void Step() override;
|
2016-09-06 06:30:30 +03:00
|
|
|
|
2017-05-22 10:33:18 +03:00
|
|
|
State GetState() const override { return DECODER_STATE_BUFFERING; }
|
2016-09-06 12:22:29 +03:00
|
|
|
|
2017-03-22 06:59:54 +03:00
|
|
|
void HandleAudioDecoded(AudioData* aAudio) override
|
2016-09-30 11:45:48 +03:00
|
|
|
{
|
2017-08-09 11:56:41 +03:00
|
|
|
mMaster->PushAudio(aAudio);
|
|
|
|
if (!mMaster->HaveEnoughDecodedAudio()) {
|
|
|
|
mMaster->RequestAudioData();
|
|
|
|
}
|
2016-09-30 11:45:48 +03:00
|
|
|
// This might be the sample we need to exit buffering.
|
|
|
|
// Schedule Step() to check it.
|
|
|
|
mMaster->ScheduleStateMachine();
|
|
|
|
}
|
|
|
|
|
2017-03-22 06:59:54 +03:00
|
|
|
void HandleVideoDecoded(VideoData* aVideo, TimeStamp aDecodeStart) override
|
2016-09-30 11:53:33 +03:00
|
|
|
{
|
2017-08-09 11:56:41 +03:00
|
|
|
mMaster->PushVideo(aVideo);
|
|
|
|
if (!mMaster->HaveEnoughDecodedVideo()) {
|
|
|
|
mMaster->RequestVideoData(media::TimeUnit());
|
|
|
|
}
|
2016-09-30 11:53:33 +03:00
|
|
|
// This might be the sample we need to exit buffering.
|
|
|
|
// Schedule Step() to check it.
|
|
|
|
mMaster->ScheduleStateMachine();
|
|
|
|
}
|
|
|
|
|
2017-05-22 10:33:18 +03:00
|
|
|
void HandleAudioCanceled() override { mMaster->RequestAudioData(); }
|
2017-01-09 10:44:18 +03:00
|
|
|
|
|
|
|
void HandleVideoCanceled() override
|
|
|
|
{
|
2017-06-12 11:06:11 +03:00
|
|
|
mMaster->RequestVideoData(media::TimeUnit());
|
2017-01-09 10:44:18 +03:00
|
|
|
}
|
|
|
|
|
2017-01-09 14:43:55 +03:00
|
|
|
void HandleWaitingForAudio() override
|
|
|
|
{
|
|
|
|
mMaster->WaitForData(MediaData::AUDIO_DATA);
|
|
|
|
}
|
|
|
|
|
|
|
|
void HandleWaitingForVideo() override
|
|
|
|
{
|
|
|
|
mMaster->WaitForData(MediaData::VIDEO_DATA);
|
|
|
|
}
|
|
|
|
|
2017-01-09 10:53:12 +03:00
|
|
|
void HandleAudioWaited(MediaData::Type aType) override
|
|
|
|
{
|
2017-01-09 15:27:36 +03:00
|
|
|
mMaster->RequestAudioData();
|
2017-01-09 10:53:12 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
void HandleVideoWaited(MediaData::Type aType) override
|
|
|
|
{
|
2017-06-12 11:06:11 +03:00
|
|
|
mMaster->RequestVideoData(media::TimeUnit());
|
2017-01-09 10:53:12 +03:00
|
|
|
}
|
|
|
|
|
2017-01-09 08:18:37 +03:00
|
|
|
void HandleEndOfAudio() override;
|
|
|
|
void HandleEndOfVideo() override;
|
2016-09-30 12:27:31 +03:00
|
|
|
|
2016-10-18 05:52:24 +03:00
|
|
|
void HandleVideoSuspendTimeout() override
|
|
|
|
{
|
2017-03-08 14:28:13 +03:00
|
|
|
// No video, so nothing to suspend.
|
2017-03-11 10:09:15 +03:00
|
|
|
if (!mMaster->HasVideo()) {
|
|
|
|
return;
|
|
|
|
}
|
2017-03-08 14:28:13 +03:00
|
|
|
|
|
|
|
mMaster->mVideoDecodeSuspended = true;
|
|
|
|
mMaster->mOnPlaybackEvent.Notify(MediaEventType::EnterVideoSuspend);
|
|
|
|
Reader()->SetVideoBlankDecode(true);
|
2016-10-18 05:52:24 +03:00
|
|
|
}
|
|
|
|
|
2016-09-06 12:22:29 +03:00
|
|
|
private:
|
|
|
|
TimeStamp mBufferingStart;
|
2016-10-05 11:15:17 +03:00
|
|
|
|
|
|
|
// The maximum number of seconds we spend buffering when we are short on
|
|
|
|
// buffered data.
|
|
|
|
const uint32_t mBufferingWait = 15;
|
2016-09-06 06:30:30 +03:00
|
|
|
};
|
|
|
|
|
2016-10-21 06:37:00 +03:00
|
|
|
/**
|
|
|
|
* Purpose: play all the decoded data and fire the 'ended' event.
|
|
|
|
*
|
|
|
|
* Transition to:
|
|
|
|
* SEEKING if any seek request.
|
|
|
|
*/
|
2016-09-06 06:34:09 +03:00
|
|
|
class MediaDecoderStateMachine::CompletedState
|
|
|
|
: public MediaDecoderStateMachine::StateObject
|
|
|
|
{
|
|
|
|
public:
|
2017-01-27 15:20:37 +03:00
|
|
|
explicit CompletedState(Master* aPtr) : StateObject(aPtr) { }
|
2016-09-06 06:34:09 +03:00
|
|
|
|
2016-10-12 09:16:40 +03:00
|
|
|
void Enter()
|
2016-09-06 06:34:09 +03:00
|
|
|
{
|
2017-08-07 10:48:49 +03:00
|
|
|
// On Android, the graphic buffer's life cycle is tied to that of the codec,
|
|
|
|
// so we can't release it while we still need to render the frame.
|
2017-07-25 11:33:44 +03:00
|
|
|
#ifndef MOZ_WIDGET_ANDROID
|
2017-06-08 06:42:43 +03:00
|
|
|
if (!mMaster->mLooping) {
|
|
|
|
// We've decoded all samples.
|
|
|
|
// We don't need decoders anymore if not looping.
|
|
|
|
Reader()->ReleaseResources();
|
|
|
|
}
|
2017-07-25 11:33:44 +03:00
|
|
|
#endif
|
2017-09-04 12:27:43 +03:00
|
|
|
bool hasNextFrame = (!mMaster->HasAudio() || !mMaster->mAudioCompleted) &&
|
|
|
|
(!mMaster->HasVideo() || !mMaster->mVideoCompleted);
|
2016-11-10 11:35:48 +03:00
|
|
|
|
2017-01-27 15:20:37 +03:00
|
|
|
mMaster->UpdateNextFrameStatus(
|
|
|
|
hasNextFrame ? MediaDecoderOwner::NEXT_FRAME_AVAILABLE
|
|
|
|
: MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE);
|
2016-11-10 11:35:48 +03:00
|
|
|
|
2016-11-15 06:54:52 +03:00
|
|
|
Step();
|
2016-09-06 06:34:09 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
void Exit() override
|
|
|
|
{
|
2016-09-06 12:33:12 +03:00
|
|
|
mSentPlaybackEndedEvent = false;
|
2016-09-06 06:34:09 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
void Step() override
|
|
|
|
{
|
2017-09-04 12:27:43 +03:00
|
|
|
if (mMaster->mPlayState != MediaDecoder::PLAY_STATE_PLAYING &&
|
|
|
|
mMaster->IsPlaying()) {
|
2016-09-06 12:28:08 +03:00
|
|
|
mMaster->StopPlayback();
|
|
|
|
}
|
|
|
|
|
|
|
|
// Play the remaining media. We want to run AdvanceFrame() at least
|
|
|
|
// once to ensure the current playback position is advanced to the
|
|
|
|
// end of the media, and so that we update the readyState.
|
2017-09-04 12:27:43 +03:00
|
|
|
if ((mMaster->HasVideo() && !mMaster->mVideoCompleted) ||
|
|
|
|
(mMaster->HasAudio() && !mMaster->mAudioCompleted)) {
|
2016-09-06 12:28:08 +03:00
|
|
|
// Start playback if necessary to play the remaining media.
|
|
|
|
mMaster->MaybeStartPlayback();
|
|
|
|
mMaster->UpdatePlaybackPositionPeriodically();
|
2017-09-04 12:27:43 +03:00
|
|
|
MOZ_ASSERT(!mMaster->IsPlaying() || mMaster->IsStateMachineScheduled(),
|
2016-09-06 12:28:08 +03:00
|
|
|
"Must have timer scheduled");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// StopPlayback in order to reset the IsPlaying() state so audio
|
|
|
|
// is restarted correctly.
|
|
|
|
mMaster->StopPlayback();
|
|
|
|
|
2016-11-15 07:06:17 +03:00
|
|
|
if (!mSentPlaybackEndedEvent) {
|
2017-03-28 10:50:53 +03:00
|
|
|
auto clockTime =
|
2017-01-27 15:20:37 +03:00
|
|
|
std::max(mMaster->AudioEndTime(), mMaster->VideoEndTime());
|
2017-08-01 08:54:14 +03:00
|
|
|
if (mMaster->mDuration.Ref()->IsInfinite()) {
|
|
|
|
// We have a finite duration when playback reaches the end.
|
|
|
|
mMaster->mDuration = Some(clockTime);
|
|
|
|
}
|
2017-03-28 11:01:55 +03:00
|
|
|
mMaster->UpdatePlaybackPosition(clockTime);
|
2016-09-06 12:28:08 +03:00
|
|
|
|
|
|
|
mMaster->mOnPlaybackEvent.Notify(MediaEventType::PlaybackEnded);
|
|
|
|
|
2016-09-06 12:33:12 +03:00
|
|
|
mSentPlaybackEndedEvent = true;
|
2016-09-06 12:28:08 +03:00
|
|
|
|
|
|
|
// MediaSink::GetEndTime() must be called before stopping playback.
|
|
|
|
mMaster->StopMediaSink();
|
|
|
|
}
|
2016-09-06 06:34:09 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
State GetState() const override
|
|
|
|
{
|
|
|
|
return DECODER_STATE_COMPLETED;
|
|
|
|
}
|
2016-09-06 12:33:12 +03:00
|
|
|
|
2016-10-28 11:33:08 +03:00
|
|
|
void HandleAudioCaptured() override
|
2016-10-05 12:03:08 +03:00
|
|
|
{
|
|
|
|
// MediaSink is changed. Schedule Step() to check if we can start playback.
|
|
|
|
mMaster->ScheduleStateMachine();
|
|
|
|
}
|
|
|
|
|
2016-10-18 05:54:39 +03:00
|
|
|
void HandleVideoSuspendTimeout() override
|
|
|
|
{
|
|
|
|
// Do nothing since no decoding is going on.
|
|
|
|
}
|
|
|
|
|
2017-03-28 06:30:32 +03:00
|
|
|
void HandleResumeVideoDecoding(const TimeUnit&) override
|
|
|
|
{
|
|
|
|
// Resume the video decoder and seek to the last video frame.
|
|
|
|
// This triggers a video-only seek which won't update the playback position.
|
2017-03-28 12:20:17 +03:00
|
|
|
StateObject::HandleResumeVideoDecoding(mMaster->mDecodedVideoEndTime);
|
2017-03-28 06:30:32 +03:00
|
|
|
}
|
|
|
|
|
2016-10-21 09:53:18 +03:00
|
|
|
void HandlePlayStateChanged(MediaDecoder::PlayState aPlayState) override
|
|
|
|
{
|
|
|
|
if (aPlayState == MediaDecoder::PLAY_STATE_PLAYING) {
|
|
|
|
// Schedule Step() to check if we can start playback.
|
|
|
|
mMaster->ScheduleStateMachine();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-09-06 12:33:12 +03:00
|
|
|
private:
|
|
|
|
bool mSentPlaybackEndedEvent = false;
|
2016-09-06 06:34:09 +03:00
|
|
|
};
|
|
|
|
|
2016-10-21 06:37:00 +03:00
|
|
|
/**
|
|
|
|
* Purpose: release all resources allocated by MDSM.
|
|
|
|
*
|
|
|
|
* Transition to:
|
|
|
|
* None since this is the final state.
|
|
|
|
*
|
|
|
|
* Transition from:
|
|
|
|
* Any states other than SHUTDOWN.
|
|
|
|
*/
|
2016-09-06 06:37:46 +03:00
|
|
|
class MediaDecoderStateMachine::ShutdownState
|
|
|
|
: public MediaDecoderStateMachine::StateObject
|
|
|
|
{
|
|
|
|
public:
|
2017-01-27 15:20:37 +03:00
|
|
|
explicit ShutdownState(Master* aPtr) : StateObject(aPtr) { }
|
2016-09-06 06:37:46 +03:00
|
|
|
|
2016-10-13 08:53:32 +03:00
|
|
|
RefPtr<ShutdownPromise> Enter();
|
2016-09-06 06:37:46 +03:00
|
|
|
|
|
|
|
void Exit() override
|
|
|
|
{
|
|
|
|
MOZ_DIAGNOSTIC_ASSERT(false, "Shouldn't escape the SHUTDOWN state.");
|
|
|
|
}
|
|
|
|
|
|
|
|
State GetState() const override
|
|
|
|
{
|
|
|
|
return DECODER_STATE_SHUTDOWN;
|
|
|
|
}
|
2016-09-12 05:34:35 +03:00
|
|
|
|
2016-10-11 06:11:42 +03:00
|
|
|
RefPtr<MediaDecoder::SeekPromise> HandleSeek(SeekTarget aTarget) override
|
|
|
|
{
|
|
|
|
MOZ_DIAGNOSTIC_ASSERT(false, "Can't seek in shutdown state.");
|
|
|
|
return MediaDecoder::SeekPromise::CreateAndReject(true, __func__);
|
|
|
|
}
|
2016-10-11 11:20:23 +03:00
|
|
|
|
|
|
|
RefPtr<ShutdownPromise> HandleShutdown() override
|
|
|
|
{
|
|
|
|
MOZ_DIAGNOSTIC_ASSERT(false, "Already shutting down.");
|
|
|
|
return nullptr;
|
|
|
|
}
|
2016-10-18 05:55:56 +03:00
|
|
|
|
|
|
|
void HandleVideoSuspendTimeout() override
|
|
|
|
{
|
|
|
|
MOZ_DIAGNOSTIC_ASSERT(false, "Already shutting down.");
|
|
|
|
}
|
2016-10-18 09:56:36 +03:00
|
|
|
|
2017-03-28 06:21:41 +03:00
|
|
|
void HandleResumeVideoDecoding(const TimeUnit&) override
|
2016-10-18 09:56:36 +03:00
|
|
|
{
|
|
|
|
MOZ_DIAGNOSTIC_ASSERT(false, "Already shutting down.");
|
|
|
|
}
|
2016-09-06 06:37:46 +03:00
|
|
|
};
|
|
|
|
|
2016-11-03 10:21:48 +03:00
|
|
|
RefPtr<MediaDecoder::SeekPromise>
|
|
|
|
MediaDecoderStateMachine::
|
|
|
|
StateObject::HandleSeek(SeekTarget aTarget)
|
|
|
|
{
|
2016-12-16 06:16:31 +03:00
|
|
|
SLOG("Changed state to SEEKING (to %" PRId64 ")", aTarget.GetTime().ToMicroseconds());
|
2016-11-03 10:21:48 +03:00
|
|
|
SeekJob seekJob;
|
2016-12-19 07:25:14 +03:00
|
|
|
seekJob.mTarget = Some(aTarget);
|
2016-11-30 05:32:29 +03:00
|
|
|
return SetSeekingState(Move(seekJob), EventVisibility::Observable);
|
2016-11-03 10:21:48 +03:00
|
|
|
}
|
|
|
|
|
2016-10-11 11:20:23 +03:00
|
|
|
RefPtr<ShutdownPromise>
|
|
|
|
MediaDecoderStateMachine::
|
|
|
|
StateObject::HandleShutdown()
|
|
|
|
{
|
2016-10-13 08:53:32 +03:00
|
|
|
return SetState<ShutdownState>();
|
2016-10-11 11:20:23 +03:00
|
|
|
}
|
|
|
|
|
2016-10-18 09:56:36 +03:00
|
|
|
static void
|
|
|
|
ReportRecoveryTelemetry(const TimeStamp& aRecoveryStart,
|
|
|
|
const MediaInfo& aMediaInfo,
|
|
|
|
bool aIsHardwareAccelerated)
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(NS_IsMainThread());
|
|
|
|
if (!aMediaInfo.HasVideo()) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Keyed by audio+video or video alone, hardware acceleration,
|
|
|
|
// and by a resolution range.
|
|
|
|
nsCString key(aMediaInfo.HasAudio() ? "AV" : "V");
|
|
|
|
key.AppendASCII(aIsHardwareAccelerated ? "(hw)," : ",");
|
|
|
|
static const struct { int32_t mH; const char* mRes; } sResolutions[] = {
|
|
|
|
{ 240, "0-240" },
|
|
|
|
{ 480, "241-480" },
|
|
|
|
{ 720, "481-720" },
|
|
|
|
{ 1080, "721-1080" },
|
|
|
|
{ 2160, "1081-2160" }
|
|
|
|
};
|
|
|
|
const char* resolution = "2161+";
|
|
|
|
int32_t height = aMediaInfo.mVideo.mImage.height;
|
|
|
|
for (const auto& res : sResolutions) {
|
|
|
|
if (height <= res.mH) {
|
|
|
|
resolution = res.mRes;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
key.AppendASCII(resolution);
|
|
|
|
|
|
|
|
TimeDuration duration = TimeStamp::Now() - aRecoveryStart;
|
|
|
|
double duration_ms = duration.ToMilliseconds();
|
|
|
|
Telemetry::Accumulate(Telemetry::VIDEO_SUSPEND_RECOVERY_TIME_MS,
|
|
|
|
key,
|
|
|
|
uint32_t(duration_ms + 0.5));
|
|
|
|
Telemetry::Accumulate(Telemetry::VIDEO_SUSPEND_RECOVERY_TIME_MS,
|
|
|
|
NS_LITERAL_CSTRING("All"),
|
|
|
|
uint32_t(duration_ms + 0.5));
|
|
|
|
}
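// For example, a hardware-accelerated 1080p clip with an audio track is
// accumulated under the key "AV(hw),721-1080" (plus the catch-all "All" key).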
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::
|
2017-03-28 06:21:41 +03:00
|
|
|
StateObject::HandleResumeVideoDecoding(const TimeUnit& aTarget)
|
2016-10-18 09:56:36 +03:00
|
|
|
{
|
|
|
|
MOZ_ASSERT(mMaster->mVideoDecodeSuspended);
|
|
|
|
|
|
|
|
// Start counting recovery time from right now.
|
|
|
|
TimeStamp start = TimeStamp::Now();
|
|
|
|
|
|
|
|
// Local reference to mInfo, so that it will be copied in the lambda below.
|
|
|
|
auto& info = Info();
|
|
|
|
bool hw = Reader()->VideoIsHardwareAccelerated();
|
|
|
|
|
|
|
|
// Start video-only seek to the current time.
|
|
|
|
SeekJob seekJob;
|
|
|
|
|
2017-04-25 11:01:43 +03:00
|
|
|
// We use fastseek to optimize the resuming time.
|
|
|
|
// FastSeek is only used for video-only media since we don't need to worry
|
|
|
|
// about A/V sync.
|
|
|
|
// Don't use fastSeek if we want to seek to the end because it might seek to a
|
|
|
|
// keyframe before the last frame (if the last frame itself is not a keyframe)
|
|
|
|
// and we always want to present the final frame to the user when seeking to
|
|
|
|
// the end.
|
|
|
|
const auto type = mMaster->HasAudio() || aTarget == mMaster->Duration()
|
|
|
|
? SeekTarget::Type::Accurate
|
|
|
|
: SeekTarget::Type::PrevSyncPoint;
|
2016-10-18 09:56:36 +03:00
|
|
|
|
2017-03-28 06:21:41 +03:00
|
|
|
seekJob.mTarget.emplace(aTarget, type, true /* aVideoOnly */);
|
2016-10-18 09:56:36 +03:00
|
|
|
|
2017-01-27 15:20:37 +03:00
|
|
|
// Hold mMaster->mAbstractMainThread here because this->mMaster will be
|
|
|
|
// invalid after the current state object is deleted in SetState();
|
2016-11-29 08:03:36 +03:00
|
|
|
RefPtr<AbstractThread> mainThread = mMaster->mAbstractMainThread;
|
|
|
|
|
2017-04-06 10:14:37 +03:00
|
|
|
SetSeekingState(Move(seekJob), EventVisibility::Suppressed)->Then(
|
2016-11-29 08:03:36 +03:00
|
|
|
mainThread, __func__,
|
2016-10-18 09:56:36 +03:00
|
|
|
[start, info, hw](){ ReportRecoveryTelemetry(start, info, hw); },
|
|
|
|
[](){});
|
|
|
|
}
|
|
|
|
|
2016-11-30 05:32:29 +03:00
|
|
|
RefPtr<MediaDecoder::SeekPromise>
|
|
|
|
MediaDecoderStateMachine::
|
|
|
|
StateObject::SetSeekingState(SeekJob&& aSeekJob, EventVisibility aVisibility)
|
|
|
|
{
|
2016-12-19 07:25:14 +03:00
|
|
|
if (aSeekJob.mTarget->IsAccurate() || aSeekJob.mTarget->IsFast()) {
|
2017-07-03 10:32:39 +03:00
|
|
|
if (aSeekJob.mTarget->IsVideoOnly()) {
|
|
|
|
return SetState<VideoOnlySeekingState>(Move(aSeekJob), aVisibility);
|
|
|
|
}
|
2016-11-30 05:32:29 +03:00
|
|
|
return SetState<AccurateSeekingState>(Move(aSeekJob), aVisibility);
|
|
|
|
}
|
|
|
|
|
2016-12-19 07:25:14 +03:00
|
|
|
if (aSeekJob.mTarget->IsNextFrame()) {
|
2016-11-30 05:32:29 +03:00
|
|
|
return SetState<NextFrameSeekingState>(Move(aSeekJob), aVisibility);
|
|
|
|
}
|
|
|
|
|
|
|
|
MOZ_ASSERT_UNREACHABLE("Unknown SeekTarget::Type.");
|
|
|
|
return nullptr;
|
|
|
|
}
|
|
|
|
|
2016-10-11 09:22:24 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::
|
2017-05-18 05:20:44 +03:00
|
|
|
DecodeMetadataState::OnMetadataRead(MetadataHolder&& aMetadata)
|
2016-10-11 09:22:24 +03:00
|
|
|
{
|
|
|
|
mMetadataRequest.Complete();
|
|
|
|
|
2017-05-18 05:20:44 +03:00
|
|
|
mMaster->mInfo.emplace(*aMetadata.mInfo);
|
2016-11-02 10:52:15 +03:00
|
|
|
mMaster->mMediaSeekable = Info().mMediaSeekable;
|
2017-01-27 15:20:37 +03:00
|
|
|
mMaster->mMediaSeekableOnlyInBufferedRanges =
|
|
|
|
Info().mMediaSeekableOnlyInBufferedRanges;
|
2016-10-11 09:22:24 +03:00
|
|
|
|
|
|
|
if (Info().mMetadataDuration.isSome()) {
|
2017-08-01 08:54:14 +03:00
|
|
|
mMaster->mDuration = Info().mMetadataDuration;
|
2016-10-11 09:22:24 +03:00
|
|
|
} else if (Info().mUnadjustedMetadataEndTime.isSome()) {
|
2016-10-16 16:49:07 +03:00
|
|
|
const TimeUnit unadjusted = Info().mUnadjustedMetadataEndTime.ref();
|
|
|
|
const TimeUnit adjustment = Info().mStartTime;
|
|
|
|
mMaster->mInfo->mMetadataDuration.emplace(unadjusted - adjustment);
|
2017-08-01 08:54:14 +03:00
|
|
|
mMaster->mDuration = Info().mMetadataDuration;
|
2016-10-11 09:22:24 +03:00
|
|
|
}
|
|
|
|
|
2016-11-01 12:10:03 +03:00
|
|
|
// If we don't know the duration by this point, we assume infinity, per spec.
|
|
|
|
if (mMaster->mDuration.Ref().isNothing()) {
|
|
|
|
mMaster->mDuration = Some(TimeUnit::FromInfinity());
|
|
|
|
}
|
|
|
|
|
2016-10-11 09:22:24 +03:00
|
|
|
if (mMaster->HasVideo()) {
|
2017-07-19 09:24:35 +03:00
|
|
|
SLOG("Video decode HWAccel=%d videoQueueSize=%d",
|
2016-10-11 09:22:24 +03:00
|
|
|
Reader()->VideoIsHardwareAccelerated(),
|
|
|
|
mMaster->GetAmpleVideoFrames());
|
|
|
|
}
|
|
|
|
|
2016-10-16 16:49:07 +03:00
|
|
|
MOZ_ASSERT(mMaster->mDuration.Ref().isSome());
|
2016-10-11 09:22:24 +03:00
|
|
|
|
2017-05-17 11:00:46 +03:00
|
|
|
mMaster->mMetadataLoadedEvent.Notify(
|
2017-05-18 11:35:49 +03:00
|
|
|
Move(aMetadata.mInfo),
|
|
|
|
Move(aMetadata.mTags),
|
2017-05-17 11:00:46 +03:00
|
|
|
MediaDecoderEventVisibility::Observable);
|
2016-10-29 04:47:49 +03:00
|
|
|
|
2017-08-25 12:11:14 +03:00
|
|
|
SetState<DecodingFirstFrameState>();
|
2016-10-11 09:22:24 +03:00
|
|
|
}
|
|
|
|
|
2016-10-28 11:10:49 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::
|
|
|
|
DormantState::HandlePlayStateChanged(MediaDecoder::PlayState aPlayState)
|
|
|
|
{
|
|
|
|
if (aPlayState == MediaDecoder::PLAY_STATE_PLAYING) {
|
|
|
|
// Exit dormant when the user wants to play.
|
2016-11-03 10:02:22 +03:00
|
|
|
MOZ_ASSERT(mMaster->mSentFirstFrameLoadedEvent);
|
2016-11-30 05:32:29 +03:00
|
|
|
SetSeekingState(Move(mPendingSeek), EventVisibility::Suppressed);
|
2016-10-28 11:10:49 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-10-11 09:22:24 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::
|
2016-11-23 12:56:12 +03:00
|
|
|
DecodingFirstFrameState::Enter()
|
2016-10-11 09:22:24 +03:00
|
|
|
{
|
|
|
|
// Transition to DECODING if we've decoded first frames.
|
|
|
|
if (mMaster->mSentFirstFrameLoadedEvent) {
|
2016-10-11 09:41:07 +03:00
|
|
|
SetState<DecodingState>();
|
2016-10-11 09:22:24 +03:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2016-10-18 09:56:36 +03:00
|
|
|
MOZ_ASSERT(!mMaster->mVideoDecodeSuspended);
|
2016-10-19 12:56:20 +03:00
|
|
|
|
2016-10-11 09:22:24 +03:00
|
|
|
// Dispatch tasks to decode first frames.
|
2016-12-21 23:32:47 +03:00
|
|
|
if (mMaster->HasAudio()) {
|
|
|
|
mMaster->RequestAudioData();
|
|
|
|
}
|
|
|
|
if (mMaster->HasVideo()) {
|
2017-06-12 11:06:11 +03:00
|
|
|
mMaster->RequestVideoData(media::TimeUnit());
|
2016-12-21 23:32:47 +03:00
|
|
|
}
|
2016-10-11 09:22:24 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::
|
|
|
|
DecodingFirstFrameState::MaybeFinishDecodeFirstFrame()
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(!mMaster->mSentFirstFrameLoadedEvent);
|
|
|
|
|
2017-09-04 12:27:43 +03:00
|
|
|
if ((mMaster->IsAudioDecoding() && AudioQueue().GetSize() == 0) ||
|
|
|
|
(mMaster->IsVideoDecoding() && VideoQueue().GetSize() == 0)) {
|
2016-10-11 09:22:24 +03:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
mMaster->FinishDecodeFirstFrame();
|
2017-02-08 13:15:28 +03:00
|
|
|
if (mPendingSeek.Exists()) {
|
|
|
|
SetSeekingState(Move(mPendingSeek), EventVisibility::Observable);
|
|
|
|
} else {
|
|
|
|
SetState<DecodingState>();
|
|
|
|
}
|
2016-10-11 09:22:24 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::
|
|
|
|
DecodingState::Enter()
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(mMaster->mSentFirstFrameLoadedEvent);
|
|
|
|
|
2017-09-04 12:27:43 +03:00
|
|
|
if (mMaster->mVideoDecodeMode == VideoDecodeMode::Suspend &&
|
|
|
|
!mMaster->mVideoDecodeSuspendTimer.IsScheduled() &&
|
|
|
|
!mMaster->mVideoDecodeSuspended) {
|
2017-03-11 09:06:09 +03:00
|
|
|
// If the VideoDecodeMode is Suspend and the timer is not scheduled, it means
|
|
|
|
// the timer has timed out and we should suspend video decoding now if
|
|
|
|
// necessary.
|
2016-10-18 05:48:46 +03:00
|
|
|
HandleVideoSuspendTimeout();
|
|
|
|
}
|
|
|
|
|
2017-01-09 17:37:57 +03:00
|
|
|
if (!mMaster->IsVideoDecoding() && !mMaster->IsAudioDecoding()) {
|
2016-10-11 09:41:07 +03:00
|
|
|
SetState<CompletedState>();
|
2016-10-11 09:22:24 +03:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2016-12-22 12:28:14 +03:00
|
|
|
mOnAudioPopped = AudioQueue().PopEvent().Connect(
|
|
|
|
OwnerThread(), [this] () {
|
2017-01-04 12:23:28 +03:00
|
|
|
if (mMaster->IsAudioDecoding() && !mMaster->HaveEnoughDecodedAudio()) {
|
2017-01-09 16:11:01 +03:00
|
|
|
EnsureAudioDecodeTaskQueued();
|
2017-01-04 12:10:37 +03:00
|
|
|
}
|
2016-12-22 12:28:14 +03:00
|
|
|
});
|
|
|
|
mOnVideoPopped = VideoQueue().PopEvent().Connect(
|
|
|
|
OwnerThread(), [this] () {
|
2017-01-04 12:23:28 +03:00
|
|
|
if (mMaster->IsVideoDecoding() && !mMaster->HaveEnoughDecodedVideo()) {
|
2017-01-09 16:11:01 +03:00
|
|
|
EnsureVideoDecodeTaskQueued();
|
2017-01-04 12:10:37 +03:00
|
|
|
}
|
2016-12-22 12:28:14 +03:00
|
|
|
});
|
|
|
|
|
2016-11-10 11:35:48 +03:00
|
|
|
mMaster->UpdateNextFrameStatus(MediaDecoderOwner::NEXT_FRAME_AVAILABLE);
|
|
|
|
|
2016-10-11 09:22:24 +03:00
|
|
|
mDecodeStartTime = TimeStamp::Now();
|
|
|
|
|
|
|
|
MaybeStopPrerolling();
|
|
|
|
|
|
|
|
// Ensure that we've got tasks enqueued to decode data if we need to.
|
2017-01-06 10:43:31 +03:00
|
|
|
DispatchDecodeTasksIfNeeded();
|
2016-10-11 09:22:24 +03:00
|
|
|
|
|
|
|
mMaster->ScheduleStateMachine();
|
2016-10-20 09:45:05 +03:00
|
|
|
|
|
|
|
// Will enter dormant when playback is paused for a while.
|
|
|
|
if (mMaster->mPlayState == MediaDecoder::PLAY_STATE_PAUSED) {
|
|
|
|
StartDormantTimer();
|
|
|
|
}
|
2016-10-11 09:22:24 +03:00
|
|
|
}
|
|
|
|
|
2016-10-28 11:30:52 +03:00
|
|
|
void
|
2016-10-11 09:22:24 +03:00
|
|
|
MediaDecoderStateMachine::
|
2017-01-09 08:18:37 +03:00
|
|
|
DecodingState::HandleEndOfAudio()
|
2016-10-11 09:22:24 +03:00
|
|
|
{
|
2017-01-09 08:18:37 +03:00
|
|
|
AudioQueue().Finish();
|
2017-01-09 17:35:54 +03:00
|
|
|
if (!mMaster->IsVideoDecoding()) {
|
2017-01-09 08:18:37 +03:00
|
|
|
SetState<CompletedState>();
|
|
|
|
} else {
|
|
|
|
MaybeStopPrerolling();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::
|
|
|
|
DecodingState::HandleEndOfVideo()
|
|
|
|
{
|
|
|
|
VideoQueue().Finish();
|
2017-01-09 17:35:54 +03:00
|
|
|
if (!mMaster->IsAudioDecoding()) {
|
2016-10-11 09:41:07 +03:00
|
|
|
SetState<CompletedState>();
|
2016-10-11 09:22:24 +03:00
|
|
|
} else {
|
|
|
|
MaybeStopPrerolling();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-01-06 10:43:31 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::
|
|
|
|
DecodingState::DispatchDecodeTasksIfNeeded()
|
|
|
|
{
|
2017-09-04 12:27:43 +03:00
|
|
|
if (mMaster->IsAudioDecoding() &&
|
|
|
|
!mMaster->mMinimizePreroll &&
|
|
|
|
!mMaster->HaveEnoughDecodedAudio()) {
|
2017-01-09 16:11:01 +03:00
|
|
|
EnsureAudioDecodeTaskQueued();
|
2017-01-06 10:43:31 +03:00
|
|
|
}
|
|
|
|
|
2017-09-04 12:27:43 +03:00
|
|
|
if (mMaster->IsVideoDecoding() &&
|
|
|
|
!mMaster->mMinimizePreroll &&
|
|
|
|
!mMaster->HaveEnoughDecodedVideo()) {
|
2017-01-09 16:11:01 +03:00
|
|
|
EnsureVideoDecodeTaskQueued();
|
2017-01-06 10:43:31 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-01-09 16:11:01 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::
|
|
|
|
DecodingState::EnsureAudioDecodeTaskQueued()
|
|
|
|
{
|
2017-09-04 12:27:43 +03:00
|
|
|
if (!mMaster->IsAudioDecoding() ||
|
|
|
|
mMaster->IsRequestingAudioData() ||
|
|
|
|
mMaster->IsWaitingAudioData()) {
|
2017-01-09 16:11:01 +03:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
mMaster->RequestAudioData();
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::
|
|
|
|
DecodingState::EnsureVideoDecodeTaskQueued()
|
|
|
|
{
|
2017-09-04 12:27:43 +03:00
|
|
|
if (!mMaster->IsVideoDecoding() ||
|
|
|
|
mMaster->IsRequestingVideoData() ||
|
|
|
|
mMaster->IsWaitingVideoData()) {
|
2017-01-09 16:11:01 +03:00
|
|
|
return;
|
|
|
|
}
|
2017-06-12 11:06:11 +03:00
|
|
|
mMaster->RequestVideoData(mMaster->GetMediaTime());
|
2017-01-09 16:11:01 +03:00
|
|
|
}
|
|
|
|
|
2016-10-11 10:54:33 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::
|
|
|
|
DecodingState::MaybeStartBuffering()
|
|
|
|
{
|
|
|
|
// Buffering makes sense only after decoding the first frames.
|
|
|
|
MOZ_ASSERT(mMaster->mSentFirstFrameLoadedEvent);
|
|
|
|
|
|
|
|
// Don't enter buffering when MediaDecoder is not playing.
|
|
|
|
if (mMaster->mPlayState != MediaDecoder::PLAY_STATE_PLAYING) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Don't enter buffering while prerolling so that the decoder has a chance to
|
|
|
|
// enqueue some decoded data before we give up and start buffering.
|
|
|
|
if (!mMaster->IsPlaying()) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2017-08-09 12:45:12 +03:00
|
|
|
// Note we could have a wait promise pending when playing non-MSE EME.
|
|
|
|
if ((mMaster->OutOfDecodedAudio() && mMaster->IsWaitingAudioData()) ||
|
|
|
|
(mMaster->OutOfDecodedVideo() && mMaster->IsWaitingVideoData())) {
|
|
|
|
SetState<BufferingState>();
|
|
|
|
return;
|
2016-10-11 10:54:33 +03:00
|
|
|
}
|
2017-08-09 12:45:12 +03:00
|
|
|
|
|
|
|
if (Reader()->UseBufferingHeuristics() && mMaster->HasLowDecodedData() &&
|
|
|
|
mMaster->HasLowBufferedData() && !mMaster->mCanPlayThrough) {
|
2016-10-11 10:54:33 +03:00
|
|
|
SetState<BufferingState>();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-10-11 09:22:24 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::
|
|
|
|
SeekingState::SeekCompleted()
|
|
|
|
{
|
2017-03-28 11:44:40 +03:00
|
|
|
const auto newCurrentTime = CalculateNewCurrentTime();
|
2016-10-11 09:22:24 +03:00
|
|
|
|
2017-08-17 06:48:08 +03:00
|
|
|
if (newCurrentTime == mMaster->Duration() && !mMaster->mIsLiveStream) {
|
2016-11-15 09:53:50 +03:00
|
|
|
// Seeked to end of media. Explicitly finish the queues so DECODING
|
|
|
|
// will transition to COMPLETED immediately. Note we don't do
|
2016-10-11 09:22:24 +03:00
|
|
|
// this when playing a live stream, since the end of media will advance
|
|
|
|
// once we download more data!
|
2016-11-15 09:53:50 +03:00
|
|
|
AudioQueue().Finish();
|
|
|
|
VideoQueue().Finish();
|
2016-11-15 07:06:17 +03:00
|
|
|
|
|
|
|
// We won't start MediaSink when paused. m{Audio,Video}Completed will
|
|
|
|
// remain false and 'playbackEnded' won't be notified. Therefore we
|
|
|
|
// need to set these flags explicitly when seeking to the end.
|
|
|
|
mMaster->mAudioCompleted = true;
|
|
|
|
mMaster->mVideoCompleted = true;
|
2017-02-07 09:33:52 +03:00
|
|
|
|
|
|
|
// There might still be a pending audio request when doing video-only or
|
|
|
|
// next-frame seek. Discard it so we won't break the invariants of the
|
|
|
|
// COMPLETED state by adding audio samples to a finished queue.
|
|
|
|
mMaster->mAudioDataRequest.DisconnectIfExists();
|
2016-10-11 09:22:24 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
// We want to resolve the seek request prior to finishing the first frame
|
|
|
|
// to ensure that the 'seeked' event is fired prior to 'loadeddata'.
|
2017-07-04 12:49:43 +03:00
|
|
|
// Note: SeekJob.Resolve() resets SeekJob.mTarget. Don't use mSeekJob anymore
|
|
|
|
// hereafter.
|
2016-11-14 11:47:07 +03:00
|
|
|
mSeekJob.Resolve(__func__);
|
2016-10-11 09:22:24 +03:00
|
|
|
|
|
|
|
// Notify FirstFrameLoaded now if we haven't since we've decoded some data
|
|
|
|
// so readyState can transition to HAVE_CURRENT_DATA and fire 'loadeddata'.
|
|
|
|
if (!mMaster->mSentFirstFrameLoadedEvent) {
|
|
|
|
mMaster->FinishDecodeFirstFrame();
|
|
|
|
}
|
|
|
|
|
|
|
|
// Ensure timestamps are up to date.
|
2017-07-06 05:59:16 +03:00
|
|
|
// Suppressed visibility comes from two cases: (1) leaving dormant state,
|
|
|
|
// and (2) resuming a suspended video decoder. We want both cases to be
|
|
|
|
// transparent to the user. So we only notify the change when the seek
|
|
|
|
// request is from the user.
|
|
|
|
if (mVisibility == EventVisibility::Observable) {
|
2016-10-11 09:22:24 +03:00
|
|
|
// Don't update playback position for video-only seek.
|
|
|
|
// Otherwise we might have |newCurrentTime > mMediaSink->GetPosition()|
|
|
|
|
// and fail the assertion in GetClock() since we didn't stop MediaSink.
|
2017-03-28 11:44:40 +03:00
|
|
|
mMaster->UpdatePlaybackPositionInternal(newCurrentTime);
|
2016-10-11 09:22:24 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
// Try to decode another frame to detect if we're at the end...
|
2016-12-16 06:16:31 +03:00
|
|
|
SLOG("Seek completed, mCurrentPosition=%" PRId64,
|
2017-03-28 13:13:22 +03:00
|
|
|
mMaster->mCurrentPosition.Ref().ToMicroseconds());
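// If we already have a decoded frame at the seek target, have the sink
// repaint it and notify the invalidation.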
|
2016-10-11 09:22:24 +03:00
|
|
|
|
2016-10-16 17:15:29 +03:00
|
|
|
if (mMaster->VideoQueue().PeekFront()) {
|
2016-10-11 09:22:24 +03:00
|
|
|
mMaster->mMediaSink->Redraw(Info().mVideo);
|
|
|
|
mMaster->mOnPlaybackEvent.Notify(MediaEventType::Invalidate);
|
|
|
|
}
|
|
|
|
|
2017-05-25 10:09:10 +03:00
|
|
|
GoToNextState();
|
2016-10-11 09:22:24 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::
|
|
|
|
BufferingState::Step()
|
|
|
|
{
|
|
|
|
TimeStamp now = TimeStamp::Now();
|
|
|
|
MOZ_ASSERT(!mBufferingStart.IsNull(), "Must know buffering start time.");
|
|
|
|
|
|
|
|
if (Reader()->UseBufferingHeuristics()) {
|
2017-08-09 12:20:53 +03:00
|
|
|
if (mMaster->IsWaitingAudioData() || mMaster->IsWaitingVideoData()) {
|
|
|
|
// Can't exit buffering when we are still waiting for data.
|
|
|
|
// Note we don't schedule the next loop because we will do that when the wait
|
|
|
|
// promise is resolved.
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
// With buffering heuristics, we exit buffering state when we:
|
|
|
|
// 1. can play through or
|
|
|
|
// 2. time out (specified by mBufferingWait) or
|
|
|
|
// 3. have enough buffered data.
|
2016-10-11 09:22:24 +03:00
|
|
|
TimeDuration elapsed = now - mBufferingStart;
|
2017-08-09 12:20:53 +03:00
|
|
|
TimeDuration timeout =
|
|
|
|
TimeDuration::FromSeconds(mBufferingWait * mMaster->mPlaybackRate);
|
|
|
|
bool stopBuffering =
|
|
|
|
mMaster->mCanPlayThrough || elapsed >= timeout ||
|
|
|
|
!mMaster->HasLowBufferedData(TimeUnit::FromSeconds(mBufferingWait));
|
|
|
|
if (!stopBuffering) {
|
2016-10-11 09:22:24 +03:00
|
|
|
SLOG("Buffering: wait %ds, timeout in %.3lfs",
|
|
|
|
mBufferingWait, mBufferingWait - elapsed.ToSeconds());
|
2017-03-28 12:10:59 +03:00
|
|
|
mMaster->ScheduleStateMachineIn(TimeUnit::FromMicroseconds(USECS_PER_S));
|
2016-10-11 09:22:24 +03:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
} else if (mMaster->OutOfDecodedAudio() || mMaster->OutOfDecodedVideo()) {
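// Without buffering heuristics we only enter buffering when out of decoded
// data; assert that a decode request or wait promise is in flight so we will
// be woken up when it settles.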
|
2017-09-04 12:27:43 +03:00
|
|
|
MOZ_ASSERT(!mMaster->OutOfDecodedAudio() ||
|
|
|
|
mMaster->IsRequestingAudioData() ||
|
|
|
|
mMaster->IsWaitingAudioData());
|
|
|
|
MOZ_ASSERT(!mMaster->OutOfDecodedVideo() ||
|
|
|
|
mMaster->IsRequestingVideoData() ||
|
|
|
|
mMaster->IsWaitingVideoData());
|
2016-10-11 09:22:24 +03:00
|
|
|
SLOG("In buffering mode, waiting to be notified: outOfAudio: %d, "
|
|
|
|
"mAudioStatus: %s, outOfVideo: %d, mVideoStatus: %s",
|
2017-09-04 12:27:43 +03:00
|
|
|
mMaster->OutOfDecodedAudio(),
|
|
|
|
mMaster->AudioRequestStatus(),
|
|
|
|
mMaster->OutOfDecodedVideo(),
|
|
|
|
mMaster->VideoRequestStatus());
|
2016-10-11 09:22:24 +03:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
SLOG("Buffered for %.3lfs", (now - mBufferingStart).ToSeconds());
|
2016-10-11 09:41:07 +03:00
|
|
|
SetState<DecodingState>();
|
2016-10-11 09:22:24 +03:00
|
|
|
}
|
|
|
|
|
2016-10-28 11:30:52 +03:00
|
|
|
void
|
2016-10-11 09:22:24 +03:00
|
|
|
MediaDecoderStateMachine::
|
2017-01-09 08:18:37 +03:00
|
|
|
BufferingState::HandleEndOfAudio()
|
|
|
|
{
|
|
|
|
AudioQueue().Finish();
|
2017-01-09 17:35:54 +03:00
|
|
|
if (!mMaster->IsVideoDecoding()) {
|
2017-01-09 08:18:37 +03:00
|
|
|
SetState<CompletedState>();
|
|
|
|
} else {
|
|
|
|
// Check if we can exit buffering.
|
|
|
|
mMaster->ScheduleStateMachine();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::
|
|
|
|
BufferingState::HandleEndOfVideo()
|
2016-10-11 09:22:24 +03:00
|
|
|
{
|
2017-01-09 08:18:37 +03:00
|
|
|
VideoQueue().Finish();
|
2017-01-09 17:35:54 +03:00
|
|
|
if (!mMaster->IsAudioDecoding()) {
|
2016-10-11 09:41:07 +03:00
|
|
|
SetState<CompletedState>();
|
2016-10-11 09:22:24 +03:00
|
|
|
} else {
|
|
|
|
// Check if we can exit buffering.
|
|
|
|
mMaster->ScheduleStateMachine();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-10-13 08:53:32 +03:00
|
|
|
RefPtr<ShutdownPromise>
|
2016-10-11 11:10:14 +03:00
|
|
|
MediaDecoderStateMachine::
|
|
|
|
ShutdownState::Enter()
|
|
|
|
{
|
|
|
|
auto master = mMaster;
|
|
|
|
|
|
|
|
master->mDelayedScheduler.Reset();
|
|
|
|
|
|
|
|
// Shutdown can happen while the suspend timer is still active, so we need to
|
|
|
|
// disconnect and dispose of the timer.
|
2017-03-08 16:28:01 +03:00
|
|
|
master->CancelSuspendTimer();
|
2016-10-11 11:10:14 +03:00
|
|
|
|
|
|
|
if (master->IsPlaying()) {
|
|
|
|
master->StopPlayback();
|
|
|
|
}
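// Disconnect any outstanding decode or wait requests so their callbacks
// can't fire after shutdown.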
|
|
|
|
|
2016-12-20 07:54:23 +03:00
|
|
|
master->mAudioDataRequest.DisconnectIfExists();
|
2016-12-20 08:17:05 +03:00
|
|
|
master->mVideoDataRequest.DisconnectIfExists();
|
2016-12-20 09:31:29 +03:00
|
|
|
master->mAudioWaitRequest.DisconnectIfExists();
|
|
|
|
master->mVideoWaitRequest.DisconnectIfExists();
|
2016-10-11 11:10:14 +03:00
|
|
|
|
2016-12-22 11:56:59 +03:00
|
|
|
master->ResetDecode();
|
2016-12-22 11:48:54 +03:00
|
|
|
master->StopMediaSink();
|
2016-10-11 11:10:14 +03:00
|
|
|
master->mMediaSink->Shutdown();
|
|
|
|
|
|
|
|
// Prevent dangling pointers by disconnecting the listeners.
|
|
|
|
master->mAudioQueueListener.Disconnect();
|
|
|
|
master->mVideoQueueListener.Disconnect();
|
|
|
|
master->mMetadataManager.Disconnect();
|
2016-11-02 10:52:15 +03:00
|
|
|
master->mOnMediaNotSeekable.Disconnect();
|
2016-10-11 11:10:14 +03:00
|
|
|
|
|
|
|
// Disconnect canonicals and mirrors before shutting down our task queue.
|
|
|
|
master->mBuffered.DisconnectIfConnected();
|
|
|
|
master->mPlayState.DisconnectIfConnected();
|
|
|
|
master->mVolume.DisconnectIfConnected();
|
|
|
|
master->mPreservesPitch.DisconnectIfConnected();
|
2017-06-08 07:03:10 +03:00
|
|
|
master->mLooping.DisconnectIfConnected();
|
2016-10-11 11:10:14 +03:00
|
|
|
master->mSameOriginMedia.DisconnectIfConnected();
|
|
|
|
master->mMediaPrincipalHandle.DisconnectIfConnected();
|
|
|
|
|
|
|
|
master->mDuration.DisconnectAll();
|
|
|
|
master->mNextFrameStatus.DisconnectAll();
|
|
|
|
master->mCurrentPosition.DisconnectAll();
|
|
|
|
master->mPlaybackOffset.DisconnectAll();
|
|
|
|
master->mIsAudioDataAudible.DisconnectAll();
|
|
|
|
|
|
|
|
// Shut down the watch manager to stop further notifications.
|
|
|
|
master->mWatchManager.Shutdown();
|
2016-10-13 08:53:32 +03:00
|
|
|
|
2016-12-04 03:51:25 +03:00
|
|
|
return Reader()->Shutdown()->Then(
|
2016-11-30 13:19:26 +03:00
|
|
|
OwnerThread(), __func__, master,
|
|
|
|
&MediaDecoderStateMachine::FinishShutdown,
|
|
|
|
&MediaDecoderStateMachine::FinishShutdown);
|
2016-10-11 11:10:14 +03:00
|
|
|
}
|
|
|
|
|
2016-07-14 10:09:06 +03:00
|
|
|
#define INIT_WATCHABLE(name, val) \
|
|
|
|
name(val, "MediaDecoderStateMachine::" #name)
|
|
|
|
#define INIT_MIRROR(name, val) \
|
|
|
|
name(mTaskQueue, val, "MediaDecoderStateMachine::" #name " (Mirror)")
|
|
|
|
#define INIT_CANONICAL(name, val) \
|
|
|
|
name(mTaskQueue, val, "MediaDecoderStateMachine::" #name " (Canonical)")
|
|
|
|
|
2012-11-14 23:46:40 +04:00
|
|
|
MediaDecoderStateMachine::MediaDecoderStateMachine(MediaDecoder* aDecoder,
|
2017-07-19 12:01:32 +03:00
|
|
|
MediaFormatReader* aReader) :
|
2015-12-03 10:59:30 +03:00
|
|
|
mDecoderID(aDecoder),
|
2016-11-29 08:03:36 +03:00
|
|
|
mAbstractMainThread(aDecoder->AbstractMainThread()),
|
2015-12-03 10:59:30 +03:00
|
|
|
mFrameStats(&aDecoder->GetFrameStatistics()),
|
|
|
|
mVideoFrameContainer(aDecoder->GetVideoFrameContainer()),
|
2017-04-25 09:57:55 +03:00
|
|
|
mTaskQueue(new TaskQueue(
|
|
|
|
GetMediaThreadPool(MediaThreadType::PLAYBACK),
|
|
|
|
"MDSM::mTaskQueue", /* aSupportsTailDispatch = */ true)),
|
2015-05-01 02:21:14 +03:00
|
|
|
mWatchManager(this, mTaskQueue),
|
2015-03-10 19:59:30 +03:00
|
|
|
mDispatchedStateMachine(false),
|
2015-10-13 10:39:01 +03:00
|
|
|
mDelayedScheduler(mTaskQueue),
|
2015-07-03 10:33:56 +03:00
|
|
|
mCurrentFrameID(0),
|
2017-07-27 09:49:57 +03:00
|
|
|
mReader(new ReaderProxy(mTaskQueue, aReader)),
|
2012-11-22 14:38:28 +04:00
|
|
|
mPlaybackRate(1.0),
|
2017-03-28 09:38:30 +03:00
|
|
|
mAmpleAudioThreshold(detail::AMPLE_AUDIO_THRESHOLD),
|
2016-01-12 16:48:25 +03:00
|
|
|
mAudioCaptured(false),
|
2017-01-05 12:59:19 +03:00
|
|
|
mMinimizePreroll(aDecoder->GetMinimizePreroll()),
|
2016-01-12 16:48:25 +03:00
|
|
|
mSentFirstFrameLoadedEvent(false),
|
2016-05-30 10:25:10 +03:00
|
|
|
mVideoDecodeSuspended(false),
|
|
|
|
mVideoDecodeSuspendTimer(mTaskQueue),
|
2015-12-24 05:14:16 +03:00
|
|
|
mOutputStreamManager(new OutputStreamManager()),
|
2017-03-10 11:17:46 +03:00
|
|
|
mVideoDecodeMode(VideoDecodeMode::Normal),
|
2017-02-08 13:15:28 +03:00
|
|
|
mIsMSE(aDecoder->IsMSE()),
|
2016-07-14 10:09:06 +03:00
|
|
|
INIT_MIRROR(mBuffered, TimeIntervals()),
|
|
|
|
INIT_MIRROR(mPlayState, MediaDecoder::PLAY_STATE_LOADING),
|
|
|
|
INIT_MIRROR(mVolume, 1.0),
|
|
|
|
INIT_MIRROR(mPreservesPitch, true),
|
2017-06-08 07:03:10 +03:00
|
|
|
INIT_MIRROR(mLooping, false),
|
2016-07-14 10:09:06 +03:00
|
|
|
INIT_MIRROR(mSameOriginMedia, false),
|
|
|
|
INIT_MIRROR(mMediaPrincipalHandle, PRINCIPAL_HANDLE_NONE),
|
|
|
|
INIT_CANONICAL(mDuration, NullableTimeUnit()),
|
2016-11-10 11:35:48 +03:00
|
|
|
INIT_CANONICAL(mNextFrameStatus, MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE),
|
2017-03-28 13:13:22 +03:00
|
|
|
INIT_CANONICAL(mCurrentPosition, TimeUnit::Zero()),
|
2016-07-14 10:09:06 +03:00
|
|
|
INIT_CANONICAL(mPlaybackOffset, 0),
|
|
|
|
INIT_CANONICAL(mIsAudioDataAudible, false)
|
2017-05-04 07:54:19 +03:00
|
|
|
#ifdef XP_WIN
|
|
|
|
, mShouldUseHiResTimers(Preferences::GetBool("media.hi-res-timers.enabled", true))
|
|
|
|
#endif
|
2010-04-02 07:03:07 +04:00
|
|
|
{
|
2012-11-14 23:46:40 +04:00
|
|
|
MOZ_COUNT_CTOR(MediaDecoderStateMachine);
|
2011-07-12 07:39:34 +04:00
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
|
2011-11-08 05:38:17 +04:00
|
|
|
|
2015-10-11 15:11:04 +03:00
|
|
|
InitVideoQueuePrefs();
|
2010-04-02 07:03:07 +04:00
|
|
|
}
|
|
|
|
|
2016-07-14 10:09:06 +03:00
|
|
|
#undef INIT_WATCHABLE
|
|
|
|
#undef INIT_MIRROR
|
|
|
|
#undef INIT_CANONICAL
|
|
|
|
|
2012-11-14 23:46:40 +04:00
|
|
|
MediaDecoderStateMachine::~MediaDecoderStateMachine()
|
2010-04-02 07:03:07 +04:00
|
|
|
{
|
2014-02-18 02:53:53 +04:00
|
|
|
MOZ_ASSERT(NS_IsMainThread(), "Should be on main thread.");
|
2012-11-14 23:46:40 +04:00
|
|
|
MOZ_COUNT_DTOR(MediaDecoderStateMachine);
|
2014-02-18 02:53:52 +04:00
|
|
|
|
2013-05-03 04:39:19 +04:00
|
|
|
#ifdef XP_WIN
|
2017-05-04 07:54:19 +03:00
|
|
|
MOZ_ASSERT(!mHiResTimersRequested);
|
2013-05-03 04:39:19 +04:00
|
|
|
#endif
|
2010-04-02 07:03:07 +04:00
|
|
|
}
|
|
|
|
|
2015-05-01 01:46:49 +03:00
|
|
|
void
|
2015-12-03 10:59:30 +03:00
|
|
|
MediaDecoderStateMachine::InitializationTask(MediaDecoder* aDecoder)
|
2015-05-01 01:46:49 +03:00
|
|
|
{
|
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
|
|
|
|
|
|
|
// Connect mirrors.
|
2016-04-20 09:45:41 +03:00
|
|
|
mBuffered.Connect(mReader->CanonicalBuffered());
|
2015-12-03 10:59:30 +03:00
|
|
|
mPlayState.Connect(aDecoder->CanonicalPlayState());
|
|
|
|
mVolume.Connect(aDecoder->CanonicalVolume());
|
|
|
|
mPreservesPitch.Connect(aDecoder->CanonicalPreservesPitch());
|
2017-06-08 07:03:10 +03:00
|
|
|
mLooping.Connect(aDecoder->CanonicalLooping());
|
2015-12-03 10:59:30 +03:00
|
|
|
mSameOriginMedia.Connect(aDecoder->CanonicalSameOriginMedia());
|
2016-02-02 08:14:13 +03:00
|
|
|
mMediaPrincipalHandle.Connect(aDecoder->CanonicalMediaPrincipalHandle());
|
2015-05-01 01:46:49 +03:00
|
|
|
|
|
|
|
// Initialize watchers.
|
2017-01-27 15:20:37 +03:00
|
|
|
mWatchManager.Watch(mBuffered,
|
|
|
|
&MediaDecoderStateMachine::BufferedRangeUpdated);
|
2015-04-30 03:27:32 +03:00
|
|
|
mWatchManager.Watch(mVolume, &MediaDecoderStateMachine::VolumeChanged);
|
2017-01-27 15:20:37 +03:00
|
|
|
mWatchManager.Watch(mPreservesPitch,
|
|
|
|
&MediaDecoderStateMachine::PreservesPitchChanged);
|
2015-05-07 18:19:33 +03:00
|
|
|
mWatchManager.Watch(mPlayState, &MediaDecoderStateMachine::PlayStateChanged);
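// Create the initial state object and enter it to start decoding metadata.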
|
2016-07-14 12:52:41 +03:00
|
|
|
|
2017-03-08 15:44:43 +03:00
|
|
|
MOZ_ASSERT(!mStateObj);
|
|
|
|
auto* s = new DecodeMetadataState(this);
|
|
|
|
mStateObj.reset(s);
|
|
|
|
s->Enter();
|
2015-05-01 01:46:49 +03:00
|
|
|
}
|
|
|
|
|
2016-05-12 09:18:53 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::AudioAudibleChanged(bool aAudible)
|
|
|
|
{
|
|
|
|
mIsAudioDataAudible = aAudible;
|
|
|
|
}
|
|
|
|
|
2015-09-09 05:12:35 +03:00
|
|
|
media::MediaSink*
|
|
|
|
MediaDecoderStateMachine::CreateAudioSink()
|
|
|
|
{
|
2015-10-18 08:24:48 +03:00
|
|
|
RefPtr<MediaDecoderStateMachine> self = this;
|
2015-09-09 05:12:35 +03:00
|
|
|
auto audioSinkCreator = [self] () {
|
|
|
|
MOZ_ASSERT(self->OnTaskQueue());
|
2017-03-22 09:55:59 +03:00
|
|
|
AudioSink* audioSink = new AudioSink(
|
2017-03-27 06:37:17 +03:00
|
|
|
self->mTaskQueue, self->mAudioQueue,
|
2017-03-28 11:16:56 +03:00
|
|
|
self->GetMediaTime(),
|
2017-08-10 04:00:07 +03:00
|
|
|
self->Info().mAudio);
|
2016-05-12 09:18:53 +03:00
|
|
|
|
|
|
|
self->mAudibleListener = audioSink->AudibleEvent().Connect(
|
2017-01-27 15:20:37 +03:00
|
|
|
self->mTaskQueue, self.get(),
|
|
|
|
&MediaDecoderStateMachine::AudioAudibleChanged);
|
2016-05-12 09:18:53 +03:00
|
|
|
return audioSink;
|
2015-09-09 05:12:35 +03:00
|
|
|
};
|
|
|
|
return new AudioSinkWrapper(mTaskQueue, audioSinkCreator);
|
|
|
|
}
|
|
|
|
|
2015-10-19 12:32:16 +03:00
|
|
|
already_AddRefed<media::MediaSink>
|
|
|
|
MediaDecoderStateMachine::CreateMediaSink(bool aAudioCaptured)
|
|
|
|
{
|
2017-01-27 15:20:37 +03:00
|
|
|
RefPtr<media::MediaSink> audioSink =
|
|
|
|
aAudioCaptured
|
|
|
|
? new DecodedStream(mTaskQueue, mAbstractMainThread, mAudioQueue,
|
|
|
|
mVideoQueue, mOutputStreamManager,
|
|
|
|
mSameOriginMedia.Ref(), mMediaPrincipalHandle.Ref())
|
2016-01-08 10:52:12 +03:00
|
|
|
: CreateAudioSink();
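// The VideoSink wraps the audio sink (or DecodedStream when capturing) so
// both tracks are driven by a single MediaSink.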
|
2015-10-19 12:32:16 +03:00
|
|
|
|
2015-10-19 13:08:11 +03:00
|
|
|
RefPtr<media::MediaSink> mediaSink =
|
|
|
|
new VideoSink(mTaskQueue, audioSink, mVideoQueue,
|
2015-12-17 05:53:49 +03:00
|
|
|
mVideoFrameContainer, *mFrameStats,
|
2015-10-19 13:08:11 +03:00
|
|
|
sVideoQueueSendToCompositorSize);
|
2015-10-19 12:32:16 +03:00
|
|
|
return mediaSink.forget();
|
|
|
|
}
|
|
|
|
|
2017-03-28 11:54:39 +03:00
|
|
|
TimeUnit
|
2015-10-07 07:09:34 +03:00
|
|
|
MediaDecoderStateMachine::GetDecodedAudioDuration()
|
2015-05-06 07:32:33 +03:00
|
|
|
{
|
2015-03-18 00:23:12 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2015-09-09 05:12:23 +03:00
|
|
|
if (mMediaSink->IsStarted()) {
|
2016-01-26 06:31:56 +03:00
|
|
|
// mDecodedAudioEndTime might be smaller than GetClock() when there is
|
|
|
|
// overlap between 2 adjacent audio samples or when we are playing
|
|
|
|
// a chained ogg file.
|
2017-03-28 12:20:17 +03:00
|
|
|
return std::max(mDecodedAudioEndTime - GetClock(), TimeUnit::Zero());
|
2011-01-13 04:06:15 +03:00
|
|
|
}
|
2015-10-07 07:09:34 +03:00
|
|
|
// MediaSink not started. All audio samples are in the queue.
|
2017-03-28 11:54:39 +03:00
|
|
|
return TimeUnit::FromMicroseconds(AudioQueue().Duration());
|
2011-01-13 04:06:15 +03:00
|
|
|
}
|
|
|
|
|
2017-01-05 06:50:20 +03:00
|
|
|
bool
|
|
|
|
MediaDecoderStateMachine::HaveEnoughDecodedAudio()
|
2012-04-30 07:12:42 +04:00
|
|
|
{
|
2015-04-30 03:07:55 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2017-03-29 10:00:46 +03:00
|
|
|
auto ampleAudio = mAmpleAudioThreshold.MultDouble(mPlaybackRate);
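// Audio is considered ample when at least one sample is queued and the
// decoded duration reaches the threshold (scaled by the playback rate).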
|
2017-09-04 12:27:43 +03:00
|
|
|
return AudioQueue().GetSize() > 0 && GetDecodedAudioDuration() >= ampleAudio;
|
2012-04-30 07:12:42 +04:00
|
|
|
}
|
|
|
|
|
2012-11-14 23:46:40 +04:00
|
|
|
bool MediaDecoderStateMachine::HaveEnoughDecodedVideo()
|
2012-04-30 07:12:42 +04:00
|
|
|
{
|
2015-04-30 03:07:55 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2017-01-05 07:09:55 +03:00
|
|
|
return VideoQueue().GetSize() >= GetAmpleVideoFrames() * mPlaybackRate + 1;
|
2012-04-30 07:12:42 +04:00
|
|
|
}
|
|
|
|
|
2016-12-21 10:36:44 +03:00
|
|
|
void
|
2017-03-22 06:59:54 +03:00
|
|
|
MediaDecoderStateMachine::PushAudio(AudioData* aSample)
|
2016-12-21 10:36:44 +03:00
|
|
|
{
|
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
|
|
|
MOZ_ASSERT(aSample);
|
|
|
|
AudioQueue().Push(aSample);
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
2017-03-22 06:59:54 +03:00
|
|
|
MediaDecoderStateMachine::PushVideo(VideoData* aSample)
|
2016-12-21 10:36:44 +03:00
|
|
|
{
|
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
|
|
|
MOZ_ASSERT(aSample);
|
2017-03-22 06:59:54 +03:00
|
|
|
aSample->mFrameID = ++mCurrentFrameID;
|
2016-12-21 10:36:44 +03:00
|
|
|
VideoQueue().Push(aSample);
|
|
|
|
}
|
|
|
|
|
2015-06-10 05:55:56 +03:00
|
|
|
void
|
2017-03-22 06:59:54 +03:00
|
|
|
MediaDecoderStateMachine::OnAudioPopped(const RefPtr<AudioData>& aSample)
|
2015-04-01 04:44:36 +03:00
|
|
|
{
|
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2015-09-15 08:51:12 +03:00
|
|
|
mPlaybackOffset = std::max(mPlaybackOffset.Ref(), aSample->mOffset);
|
2015-04-01 04:44:36 +03:00
|
|
|
}
|
|
|
|
|
2015-06-10 05:55:56 +03:00
|
|
|
void
|
2017-03-22 06:59:54 +03:00
|
|
|
MediaDecoderStateMachine::OnVideoPopped(const RefPtr<VideoData>& aSample)
|
2015-04-01 04:44:36 +03:00
|
|
|
{
|
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2015-09-15 08:51:12 +03:00
|
|
|
mPlaybackOffset = std::max(mPlaybackOffset.Ref(), aSample->mOffset);
|
2014-11-03 11:20:14 +03:00
|
|
|
}
|
|
|
|
|
2014-06-18 09:07:02 +04:00
|
|
|
bool
|
|
|
|
MediaDecoderStateMachine::IsAudioDecoding()
|
|
|
|
{
|
2015-04-30 03:07:55 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2014-06-18 09:07:02 +04:00
|
|
|
return HasAudio() && !AudioQueue().IsFinished();
|
|
|
|
}
|
|
|
|
|
|
|
|
bool
|
|
|
|
MediaDecoderStateMachine::IsVideoDecoding()
|
|
|
|
{
|
2015-04-30 03:07:55 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2014-06-18 09:07:02 +04:00
|
|
|
return HasVideo() && !VideoQueue().IsFinished();
|
2014-03-11 07:44:09 +04:00
|
|
|
}
|
|
|
|
|
2014-10-30 23:12:00 +03:00
|
|
|
bool MediaDecoderStateMachine::IsPlaying() const
|
2010-04-02 07:03:07 +04:00
|
|
|
{
|
2015-10-06 05:26:33 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2015-09-10 06:31:30 +03:00
|
|
|
return mMediaSink->IsPlaying();
|
2010-04-02 07:03:07 +04:00
|
|
|
}
|
|
|
|
|
2017-02-17 22:44:51 +03:00
|
|
|
void MediaDecoderStateMachine::SetMediaNotSeekable()
|
|
|
|
{
|
|
|
|
mMediaSeekable = false;
|
|
|
|
}
|
|
|
|
|
2016-04-01 18:10:44 +03:00
|
|
|
nsresult MediaDecoderStateMachine::Init(MediaDecoder* aDecoder)
|
2010-04-02 07:03:07 +04:00
|
|
|
{
|
2014-02-18 02:53:52 +04:00
|
|
|
MOZ_ASSERT(NS_IsMainThread());
|
2016-04-01 18:10:44 +03:00
|
|
|
|
|
|
|
// Dispatch initialization that needs to happen on the task queue.
|
2016-05-05 11:45:00 +03:00
|
|
|
nsCOMPtr<nsIRunnable> r = NewRunnableMethod<RefPtr<MediaDecoder>>(
|
2017-06-12 22:34:10 +03:00
|
|
|
"MediaDecoderStateMachine::InitializationTask",
|
|
|
|
this,
|
|
|
|
&MediaDecoderStateMachine::InitializationTask,
|
|
|
|
aDecoder);
|
2017-03-08 15:44:43 +03:00
|
|
|
mTaskQueue->DispatchStateChange(r.forget());
|
2016-04-01 18:10:44 +03:00
|
|
|
|
|
|
|
mAudioQueueListener = AudioQueue().PopEvent().Connect(
|
|
|
|
mTaskQueue, this, &MediaDecoderStateMachine::OnAudioPopped);
|
|
|
|
mVideoQueueListener = VideoQueue().PopEvent().Connect(
|
|
|
|
mTaskQueue, this, &MediaDecoderStateMachine::OnVideoPopped);
|
|
|
|
|
2016-04-20 09:45:41 +03:00
|
|
|
mMetadataManager.Connect(mReader->TimedMetadataEvent(), OwnerThread());
|
2016-04-01 18:10:44 +03:00
|
|
|
|
2016-11-02 10:52:15 +03:00
|
|
|
mOnMediaNotSeekable = mReader->OnMediaNotSeekable().Connect(
|
2017-02-17 22:44:51 +03:00
|
|
|
OwnerThread(), this, &MediaDecoderStateMachine::SetMediaNotSeekable);
|
2016-11-02 10:52:15 +03:00
|
|
|
|
2016-04-01 18:10:44 +03:00
|
|
|
mMediaSink = CreateMediaSink(mAudioCaptured);
|
|
|
|
|
2016-04-20 09:45:41 +03:00
|
|
|
nsresult rv = mReader->Init();
|
2014-06-18 09:07:02 +04:00
|
|
|
NS_ENSURE_SUCCESS(rv, rv);
|
2016-03-19 02:51:06 +03:00
|
|
|
|
2017-07-06 06:45:02 +03:00
|
|
|
mReader->SetCanonicalDuration(&mDuration);
|
|
|
|
|
2014-06-18 09:07:02 +04:00
|
|
|
return NS_OK;
|
2010-04-02 07:03:07 +04:00
|
|
|
}
|
|
|
|
|
2017-01-05 09:32:53 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::StopPlayback()
|
2010-04-02 07:03:07 +04:00
|
|
|
{
|
2015-03-18 00:23:12 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2017-03-24 06:33:06 +03:00
|
|
|
LOG("StopPlayback()");
|
2011-07-12 07:39:37 +04:00
|
|
|
|
2015-12-01 04:34:02 +03:00
|
|
|
mOnPlaybackEvent.Notify(MediaEventType::PlaybackStopped);
|
2011-01-18 03:53:18 +03:00
|
|
|
|
2010-04-02 07:03:07 +04:00
|
|
|
if (IsPlaying()) {
|
2015-09-10 06:31:30 +03:00
|
|
|
mMediaSink->SetPlaying(false);
|
|
|
|
MOZ_ASSERT(!IsPlaying());
|
2017-05-04 07:54:19 +03:00
|
|
|
#ifdef XP_WIN
|
|
|
|
if (mHiResTimersRequested) {
|
|
|
|
mHiResTimersRequested = false;
|
|
|
|
timeEndPeriod(1);
|
|
|
|
}
|
|
|
|
#endif
|
2010-04-02 07:03:07 +04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-12-30 10:16:48 +03:00
|
|
|
void MediaDecoderStateMachine::MaybeStartPlayback()
|
2010-04-02 07:03:07 +04:00
|
|
|
{
|
2015-04-07 22:11:41 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2016-09-01 09:28:45 +03:00
|
|
|
// Playback should only be started after the first frames have been decoded.
|
|
|
|
MOZ_ASSERT(mSentFirstFrameLoadedEvent);
|
2015-07-02 05:34:05 +03:00
|
|
|
|
2014-12-30 10:16:48 +03:00
|
|
|
if (IsPlaying()) {
|
|
|
|
// Logging this case is really spammy - don't do it.
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2017-01-05 12:38:17 +03:00
|
|
|
if (mPlayState != MediaDecoder::PLAY_STATE_PLAYING) {
|
2017-03-24 06:33:06 +03:00
|
|
|
LOG("Not starting playback [mPlayState=%d]", mPlayState.Ref());
|
2014-08-18 16:49:51 +04:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2017-03-24 06:33:06 +03:00
|
|
|
LOG("MaybeStartPlayback() starting playback");
|
2015-12-01 04:34:02 +03:00
|
|
|
mOnPlaybackEvent.Notify(MediaEventType::PlaybackStarted);
|
2015-09-09 05:12:23 +03:00
|
|
|
StartMediaSink();
|
2015-07-11 11:41:39 +03:00
|
|
|
|
2017-05-04 07:54:19 +03:00
|
|
|
#ifdef XP_WIN
|
|
|
|
if (!mHiResTimersRequested && mShouldUseHiResTimers) {
|
|
|
|
mHiResTimersRequested = true;
|
|
|
|
// Ensure high precision timers are enabled on Windows, otherwise the state
|
|
|
|
// machine isn't woken up at reliable intervals to set the next frame, and we
|
|
|
|
// drop frames while painting. Note that each call must be matched by a
|
|
|
|
// corresponding timeEndPeriod() call. Enabling high precision timers causes
|
|
|
|
// the CPU to wake up more frequently on Windows 7 and earlier, which causes
|
|
|
|
// more CPU load and battery use. So we only enable high precision timers
|
|
|
|
// when we're actually playing.
|
|
|
|
timeBeginPeriod(1);
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
2015-09-10 06:31:30 +03:00
|
|
|
if (!IsPlaying()) {
|
|
|
|
mMediaSink->SetPlaying(true);
|
|
|
|
MOZ_ASSERT(IsPlaying());
|
|
|
|
}
|
2010-04-02 07:03:07 +04:00
|
|
|
}
|
|
|
|
|
2017-03-28 10:57:38 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::UpdatePlaybackPositionInternal(const TimeUnit& aTime)
|
2010-04-02 07:03:07 +04:00
|
|
|
{
|
2015-03-18 00:23:12 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2017-03-28 10:57:38 +03:00
|
|
|
LOGV("UpdatePlaybackPositionInternal(%" PRId64 ")", aTime.ToMicroseconds());
|
2010-04-02 07:03:07 +04:00
|
|
|
|
2017-03-28 13:13:22 +03:00
|
|
|
mCurrentPosition = aTime;
|
|
|
|
NS_ASSERTION(mCurrentPosition.Ref() >= TimeUnit::Zero(),
|
|
|
|
"CurrentTime should be positive!");
|
2017-08-01 08:54:14 +03:00
|
|
|
mDuration = Some(std::max(mDuration.Ref().ref(), mCurrentPosition.Ref()));
|
2011-02-01 05:57:13 +03:00
|
|
|
}
|
|
|
|
|
2017-03-28 11:01:55 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::UpdatePlaybackPosition(const TimeUnit& aTime)
|
2011-02-01 05:57:13 +03:00
|
|
|
{
|
2015-04-07 22:11:41 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2017-03-28 11:01:55 +03:00
|
|
|
UpdatePlaybackPositionInternal(aTime);
|
2011-02-01 05:57:13 +03:00
|
|
|
|
2017-09-04 12:27:43 +03:00
|
|
|
bool fragmentEnded =
|
|
|
|
mFragmentEndTime.IsValid() && GetMediaTime() >= mFragmentEndTime;
|
2017-03-28 11:01:55 +03:00
|
|
|
mMetadataManager.DispatchMetadataIfNeeded(aTime);
|
2012-11-30 17:17:54 +04:00
|
|
|
|
2011-08-25 03:42:23 +04:00
|
|
|
if (fragmentEnded) {
|
|
|
|
StopPlayback();
|
|
|
|
}
|
2010-04-02 07:03:07 +04:00
|
|
|
}
|
|
|
|
|
2016-08-12 09:54:12 +03:00
|
|
|
/* static */ const char*
|
|
|
|
MediaDecoderStateMachine::ToStateStr(State aState)
|
|
|
|
{
|
|
|
|
switch (aState) {
|
2016-09-01 09:28:45 +03:00
|
|
|
case DECODER_STATE_DECODING_METADATA: return "DECODING_METADATA";
|
|
|
|
case DECODER_STATE_DORMANT: return "DORMANT";
|
|
|
|
case DECODER_STATE_DECODING_FIRSTFRAME: return "DECODING_FIRSTFRAME";
|
|
|
|
case DECODER_STATE_DECODING: return "DECODING";
|
|
|
|
case DECODER_STATE_SEEKING: return "SEEKING";
|
|
|
|
case DECODER_STATE_BUFFERING: return "BUFFERING";
|
|
|
|
case DECODER_STATE_COMPLETED: return "COMPLETED";
|
|
|
|
case DECODER_STATE_SHUTDOWN: return "SHUTDOWN";
|
2016-08-12 09:54:12 +03:00
|
|
|
default: MOZ_ASSERT_UNREACHABLE("Invalid state.");
|
|
|
|
}
|
|
|
|
return "UNKNOWN";
|
|
|
|
}
|
|
|
|
|
|
|
|
const char*
|
|
|
|
MediaDecoderStateMachine::ToStateStr()
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2017-01-13 10:19:13 +03:00
|
|
|
return ToStateStr(mStateObj->GetState());
|
2016-08-12 09:54:12 +03:00
|
|
|
}
|
2014-08-28 04:46:00 +04:00
|
|
|
|
2015-04-30 03:27:32 +03:00
|
|
|
void MediaDecoderStateMachine::VolumeChanged()
|
2010-04-02 07:03:07 +04:00
|
|
|
{
|
2015-04-30 03:27:32 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2015-09-09 05:12:23 +03:00
|
|
|
mMediaSink->SetVolume(mVolume);
|
2010-04-02 07:03:07 +04:00
|
|
|
}
|
|
|
|
|
2015-12-03 10:59:44 +03:00
|
|
|
RefPtr<ShutdownPromise>
|
|
|
|
MediaDecoderStateMachine::Shutdown()
|
2010-04-02 07:03:07 +04:00
|
|
|
{
|
2015-03-25 01:12:08 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2016-10-11 11:20:23 +03:00
|
|
|
return mStateObj->HandleShutdown();
|
2010-04-02 07:03:07 +04:00
|
|
|
}
|
|
|
|
|
2015-05-07 18:19:33 +03:00
|
|
|
void MediaDecoderStateMachine::PlayStateChanged()
|
2010-04-02 07:03:07 +04:00
|
|
|
{
|
2015-03-18 00:23:12 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2014-12-30 10:16:48 +03:00
|
|
|
|
2015-05-07 18:19:33 +03:00
|
|
|
if (mPlayState != MediaDecoder::PLAY_STATE_PLAYING) {
|
2017-03-08 16:28:01 +03:00
|
|
|
CancelSuspendTimer();
|
2016-10-21 09:53:18 +03:00
|
|
|
} else if (mMinimizePreroll) {
|
|
|
|
// Once we start playing, we don't want to minimize our prerolling, as we
|
2017-01-27 15:20:37 +03:00
|
|
|
// assume the user is likely to want to keep playing in the future. This needs
|
|
|
|
// to happen before we invoke StartDecoding().
|
2014-12-30 10:16:48 +03:00
|
|
|
mMinimizePreroll = false;
|
|
|
|
}
|
|
|
|
|
2016-10-21 09:53:18 +03:00
|
|
|
mStateObj->HandlePlayStateChanged(mPlayState);
|
2010-04-02 07:03:07 +04:00
|
|
|
}
|
|
|
|
|
2017-03-10 11:17:46 +03:00
|
|
|
void MediaDecoderStateMachine::SetVideoDecodeMode(VideoDecodeMode aMode)
|
|
|
|
{
|
2017-08-01 00:29:07 +03:00
|
|
|
MOZ_ASSERT(NS_IsMainThread());
|
2017-06-12 22:34:10 +03:00
|
|
|
nsCOMPtr<nsIRunnable> r = NewRunnableMethod<VideoDecodeMode>(
|
|
|
|
"MediaDecoderStateMachine::SetVideoDecodeModeInternal",
|
|
|
|
this,
|
|
|
|
&MediaDecoderStateMachine::SetVideoDecodeModeInternal,
|
|
|
|
aMode);
|
2017-03-10 11:17:46 +03:00
|
|
|
OwnerThread()->DispatchStateChange(r.forget());
|
|
|
|
}
|
|
|
|
|
|
|
|
void MediaDecoderStateMachine::SetVideoDecodeModeInternal(VideoDecodeMode aMode)
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
|
|
|
|
2017-07-17 13:38:35 +03:00
|
|
|
// Should not suspend decoding if the pref is not turned on.
|
|
|
|
if (!MediaPrefs::MDSMSuspendBackgroundVideoEnabled() &&
|
|
|
|
aMode == VideoDecodeMode::Suspend) {
|
2017-07-24 08:21:35 +03:00
|
|
|
LOG("SetVideoDecodeModeInternal(), early return because preference off and set to Suspend");
|
2017-03-10 11:17:46 +03:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (aMode == mVideoDecodeMode) {
|
2017-07-24 08:21:35 +03:00
|
|
|
LOG("SetVideoDecodeModeInternal(), early return because the mode does not change");
|
2017-03-10 11:17:46 +03:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2017-07-24 08:21:35 +03:00
|
|
|
LOG("SetVideoDecodeModeInternal(), VideoDecodeMode=(%s->%s), mVideoDecodeSuspended=%c",
|
|
|
|
mVideoDecodeMode == VideoDecodeMode::Normal ? "Normal" : "Suspend",
|
|
|
|
aMode == VideoDecodeMode::Normal ? "Normal" : "Suspend",
|
|
|
|
mVideoDecodeSuspended ? 'T' : 'F');
|
|
|
|
|
2017-03-10 11:17:46 +03:00
|
|
|
// Set new video decode mode.
|
|
|
|
mVideoDecodeMode = aMode;
|
|
|
|
|
|
|
|
// Start timer to trigger suspended video decoding.
|
|
|
|
if (mVideoDecodeMode == VideoDecodeMode::Suspend) {
|
|
|
|
TimeStamp target = TimeStamp::Now() + SuspendBackgroundVideoDelay();
|
|
|
|
|
|
|
|
RefPtr<MediaDecoderStateMachine> self = this;
|
|
|
|
mVideoDecodeSuspendTimer.Ensure(target,
|
|
|
|
[=]() { self->OnSuspendTimerResolved(); },
|
|
|
|
[] () { MOZ_DIAGNOSTIC_ASSERT(false); });
|
|
|
|
mOnPlaybackEvent.Notify(MediaEventType::StartVideoSuspendTimer);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Resuming from suspended decoding
|
|
|
|
|
|
|
|
// If the suspend timer exists, destroy it.
|
|
|
|
CancelSuspendTimer();
|
|
|
|
|
|
|
|
if (mVideoDecodeSuspended) {
|
2017-07-03 10:14:53 +03:00
|
|
|
const auto target = mMediaSink->IsStarted() ? GetClock() : GetMediaTime();
|
|
|
|
mStateObj->HandleResumeVideoDecoding(target + detail::RESUME_VIDEO_PREMIUM);
|
2017-03-08 16:28:01 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-06-18 00:22:10 +03:00
|
|
|
void MediaDecoderStateMachine::BufferedRangeUpdated()
|
2012-01-06 10:40:51 +04:00
|
|
|
{
|
2015-06-17 01:37:48 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2012-01-06 10:40:51 +04:00
|
|
|
|
2017-08-01 08:54:14 +03:00
|
|
|
// While playing an unseekable stream of unknown duration, mDuration
|
|
|
|
// is updated as we play. But if data is being downloaded
|
|
|
|
// faster than it is played, mDuration won't reflect the end of playable data
|
2012-01-06 10:40:51 +04:00
|
|
|
// since we haven't played the frame at the end of buffered data. So update
|
2017-08-01 08:54:14 +03:00
|
|
|
// mDuration here as new data is downloaded to prevent such a lag.
|
|
|
|
if (mBuffered.Ref().IsInvalid()) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool exists;
|
|
|
|
media::TimeUnit end{ mBuffered.Ref().GetEnd(&exists) };
|
|
|
|
if (!exists) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Use estimated duration from buffer ranges when mDuration is unknown or
|
|
|
|
// the estimated duration is larger.
|
|
|
|
if (mDuration.Ref().isNothing() || mDuration.Ref()->IsInfinite() ||
|
|
|
|
end > mDuration.Ref().ref()) {
|
|
|
|
mDuration = Some(end);
|
2012-01-06 10:40:51 +04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-10-18 08:24:48 +03:00
|
|
|
RefPtr<MediaDecoder::SeekPromise>
|
2016-11-13 08:23:39 +03:00
|
|
|
MediaDecoderStateMachine::Seek(const SeekTarget& aTarget)
|
2010-04-02 07:03:07 +04:00
|
|
|
{
|
2015-03-18 00:23:12 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2012-11-30 17:17:54 +04:00
|
|
|
|
2016-02-04 07:31:21 +03:00
|
|
|
// We need to be able to seek in some way
|
|
|
|
if (!mMediaSeekable && !mMediaSeekableOnlyInBufferedRanges) {
|
2017-03-24 06:38:22 +03:00
|
|
|
LOGW("Seek() should not be called on a non-seekable media");
|
2017-01-27 15:20:37 +03:00
|
|
|
return MediaDecoder::SeekPromise::CreateAndReject(/* aIgnored = */ true,
|
|
|
|
__func__);
|
2012-11-30 17:17:54 +04:00
|
|
|
}
|
2014-11-06 12:52:44 +03:00
|
|
|
|
2016-05-24 18:05:35 +03:00
|
|
|
if (aTarget.IsNextFrame() && !HasVideo()) {
|
2017-03-24 06:38:22 +03:00
|
|
|
LOGW("Ignore a NextFrameSeekTask on a media file without video track.");
|
2017-01-27 15:20:37 +03:00
|
|
|
return MediaDecoder::SeekPromise::CreateAndReject(/* aIgnored = */ true,
|
|
|
|
__func__);
|
2016-05-24 18:05:35 +03:00
|
|
|
}
|
|
|
|
|
2016-08-31 09:54:24 +03:00
|
|
|
MOZ_ASSERT(mDuration.Ref().isSome(), "We should have got duration already");
|
2014-11-06 12:52:44 +03:00
|
|
|
|
2016-10-04 11:33:40 +03:00
|
|
|
return mStateObj->HandleSeek(aTarget);
|
2015-02-20 22:05:56 +03:00
|
|
|
}
|
|
|
|
|
2015-10-18 08:24:48 +03:00
|
|
|
RefPtr<MediaDecoder::SeekPromise>
|
2016-11-13 08:23:39 +03:00
|
|
|
MediaDecoderStateMachine::InvokeSeek(const SeekTarget& aTarget)
|
2015-09-21 09:01:45 +03:00
|
|
|
{
|
2017-04-30 10:37:19 +03:00
|
|
|
return InvokeAsync(
|
2016-11-13 08:23:39 +03:00
|
|
|
OwnerThread(), this, __func__,
|
|
|
|
&MediaDecoderStateMachine::Seek, aTarget);
|
2015-09-21 09:01:45 +03:00
|
|
|
}
|
|
|
|
|
2015-09-09 05:12:23 +03:00
|
|
|
void MediaDecoderStateMachine::StopMediaSink()
|
2011-07-12 07:39:10 +04:00
|
|
|
{
|
2015-03-18 00:23:12 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2015-09-09 05:12:23 +03:00
|
|
|
if (mMediaSink->IsStarted()) {
|
2017-03-24 06:33:06 +03:00
|
|
|
LOG("Stop MediaSink");
|
2016-05-12 09:18:53 +03:00
|
|
|
mAudibleListener.DisconnectIfExists();
|
|
|
|
|
2015-09-09 05:12:23 +03:00
|
|
|
mMediaSink->Stop();
|
2015-10-19 12:32:16 +03:00
|
|
|
mMediaSinkAudioPromise.DisconnectIfExists();
|
|
|
|
mMediaSinkVideoPromise.DisconnectIfExists();
|
2010-04-02 07:03:07 +04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-07-20 09:28:29 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::RequestAudioData()
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2016-12-21 12:21:21 +03:00
|
|
|
MOZ_ASSERT(IsAudioDecoding());
|
|
|
|
MOZ_ASSERT(!IsRequestingAudioData());
|
|
|
|
MOZ_ASSERT(!IsWaitingAudioData());
|
2017-07-26 23:03:57 +03:00
|
|
|
LOGV("Queueing audio task - queued=%zu, decoder-queued=%zu",
|
2017-03-24 06:58:37 +03:00
|
|
|
AudioQueue().GetSize(), mReader->SizeOfAudioQueueInFrames());
|
2015-02-05 04:12:51 +03:00
|
|
|
|
2017-02-04 00:57:49 +03:00
|
|
|
RefPtr<MediaDecoderStateMachine> self = this;
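// Route the decoded sample (or failure) into the current state object, which
// decides how to proceed.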
|
2017-01-11 11:33:29 +03:00
|
|
|
mReader->RequestAudioData()->Then(
|
|
|
|
OwnerThread(), __func__,
|
2017-05-11 03:30:39 +03:00
|
|
|
[this, self] (RefPtr<AudioData> aAudio) {
|
2017-01-11 11:33:29 +03:00
|
|
|
MOZ_ASSERT(aAudio);
|
|
|
|
mAudioDataRequest.Complete();
|
|
|
|
// aAudio->GetEndTime() is not always monotonically increasing in chained ogg.
|
2017-03-28 12:20:17 +03:00
|
|
|
mDecodedAudioEndTime = std::max(
|
2017-04-14 09:14:08 +03:00
|
|
|
aAudio->GetEndTime(), mDecodedAudioEndTime);
|
2017-04-14 12:13:36 +03:00
|
|
|
LOGV("OnAudioDecoded [%" PRId64 ",%" PRId64 "]",
|
|
|
|
aAudio->mTime.ToMicroseconds(),
|
2017-04-14 09:14:08 +03:00
|
|
|
aAudio->GetEndTime().ToMicroseconds());
|
2017-01-11 11:33:29 +03:00
|
|
|
mStateObj->HandleAudioDecoded(aAudio);
|
|
|
|
},
|
2017-02-04 00:57:49 +03:00
|
|
|
[this, self] (const MediaResult& aError) {
|
2017-03-24 06:58:37 +03:00
|
|
|
LOGV("OnAudioNotDecoded aError=%" PRIu32, static_cast<uint32_t>(aError.Code()));
|
2017-01-11 11:33:29 +03:00
|
|
|
mAudioDataRequest.Complete();
|
|
|
|
switch (aError.Code()) {
|
|
|
|
case NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA:
|
|
|
|
mStateObj->HandleWaitingForAudio();
|
|
|
|
break;
|
|
|
|
case NS_ERROR_DOM_MEDIA_CANCELED:
|
|
|
|
mStateObj->HandleAudioCanceled();
|
|
|
|
break;
|
|
|
|
case NS_ERROR_DOM_MEDIA_END_OF_STREAM:
|
|
|
|
mStateObj->HandleEndOfAudio();
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
DecodeError(aError);
|
|
|
|
}
|
|
|
|
})->Track(mAudioDataRequest);
|
2014-03-11 07:44:09 +04:00
|
|
|
}
|
2014-03-11 07:44:09 +04:00
|
|
|
|
2015-07-20 09:28:29 +03:00
|
|
|
void
|
2017-06-12 11:06:11 +03:00
|
|
|
MediaDecoderStateMachine::RequestVideoData(const media::TimeUnit& aCurrentTime)
|
2015-07-20 09:28:29 +03:00
|
|
|
{
|
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2016-12-21 12:21:21 +03:00
|
|
|
MOZ_ASSERT(IsVideoDecoding());
|
|
|
|
MOZ_ASSERT(!IsRequestingVideoData());
|
|
|
|
MOZ_ASSERT(!IsWaitingVideoData());
|
2017-07-26 23:03:57 +03:00
|
|
|
LOGV("Queueing video task - queued=%zu, decoder-queued=%zo"
|
2017-06-12 11:06:11 +03:00
|
|
|
", stime=%" PRId64,
|
2017-03-24 06:58:37 +03:00
|
|
|
VideoQueue().GetSize(), mReader->SizeOfVideoQueueInFrames(),
|
2017-06-12 11:06:11 +03:00
|
|
|
aCurrentTime.ToMicroseconds());
|
2016-12-20 08:17:05 +03:00
|
|
|
|
|
|
|
TimeStamp videoDecodeStartTime = TimeStamp::Now();
|
2017-02-04 00:57:49 +03:00
|
|
|
RefPtr<MediaDecoderStateMachine> self = this;
|
2017-06-12 11:06:11 +03:00
|
|
|
mReader->RequestVideoData(aCurrentTime)->Then(
|
2017-01-11 11:33:29 +03:00
|
|
|
OwnerThread(), __func__,
|
2017-05-11 03:30:39 +03:00
|
|
|
[this, self, videoDecodeStartTime] (RefPtr<VideoData> aVideo) {
|
2017-01-11 11:33:29 +03:00
|
|
|
MOZ_ASSERT(aVideo);
|
|
|
|
mVideoDataRequest.Complete();
|
|
|
|
// Handle abnormal or negative timestamps.
|
2017-03-28 12:20:17 +03:00
|
|
|
mDecodedVideoEndTime = std::max(
|
2017-04-14 09:14:08 +03:00
|
|
|
mDecodedVideoEndTime, aVideo->GetEndTime());
|
2017-04-14 12:13:36 +03:00
|
|
|
LOGV("OnVideoDecoded [%" PRId64 ",%" PRId64 "]",
|
|
|
|
aVideo->mTime.ToMicroseconds(),
|
2017-04-14 09:14:08 +03:00
|
|
|
aVideo->GetEndTime().ToMicroseconds());
|
2017-01-11 11:33:29 +03:00
|
|
|
mStateObj->HandleVideoDecoded(aVideo, videoDecodeStartTime);
|
|
|
|
},
|
2017-02-04 00:57:49 +03:00
|
|
|
[this, self] (const MediaResult& aError) {
|
2017-03-24 06:58:37 +03:00
|
|
|
LOGV("OnVideoNotDecoded aError=%" PRIu32 , static_cast<uint32_t>(aError.Code()));
|
2017-01-11 11:33:29 +03:00
|
|
|
mVideoDataRequest.Complete();
|
|
|
|
switch (aError.Code()) {
|
|
|
|
case NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA:
|
|
|
|
mStateObj->HandleWaitingForVideo();
|
|
|
|
break;
|
|
|
|
case NS_ERROR_DOM_MEDIA_CANCELED:
|
|
|
|
mStateObj->HandleVideoCanceled();
|
|
|
|
break;
|
|
|
|
case NS_ERROR_DOM_MEDIA_END_OF_STREAM:
|
|
|
|
mStateObj->HandleEndOfVideo();
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
DecodeError(aError);
|
|
|
|
}
|
|
|
|
})->Track(mVideoDataRequest);
|
2012-01-19 22:30:29 +04:00
|
|
|
}
|
2011-11-08 05:38:17 +04:00
|
|
|
|
2016-12-20 09:31:29 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::WaitForData(MediaData::Type aType)
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
|
|
|
MOZ_ASSERT(aType == MediaData::AUDIO_DATA || aType == MediaData::VIDEO_DATA);
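// Register a wait promise with the reader; when data of the given type
// arrives, the current state object is notified.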
|
2017-02-04 00:57:49 +03:00
|
|
|
RefPtr<MediaDecoderStateMachine> self = this;
|
2016-12-20 09:31:29 +03:00
|
|
|
if (aType == MediaData::AUDIO_DATA) {
|
2017-01-11 11:33:29 +03:00
|
|
|
mReader->WaitForData(MediaData::AUDIO_DATA)->Then(
|
|
|
|
OwnerThread(), __func__,
|
2017-02-04 00:57:49 +03:00
|
|
|
[self] (MediaData::Type aType) {
|
|
|
|
self->mAudioWaitRequest.Complete();
|
2017-01-11 11:33:29 +03:00
|
|
|
MOZ_ASSERT(aType == MediaData::AUDIO_DATA);
|
2017-02-04 00:57:49 +03:00
|
|
|
self->mStateObj->HandleAudioWaited(aType);
|
2017-01-11 11:33:29 +03:00
|
|
|
},
|
2017-02-04 00:57:49 +03:00
|
|
|
[self] (const WaitForDataRejectValue& aRejection) {
|
|
|
|
self->mAudioWaitRequest.Complete();
|
|
|
|
self->DecodeError(NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA);
|
2017-01-11 11:33:29 +03:00
|
|
|
})->Track(mAudioWaitRequest);
|
2016-12-20 09:31:29 +03:00
|
|
|
} else {
|
2017-01-11 11:33:29 +03:00
|
|
|
mReader->WaitForData(MediaData::VIDEO_DATA)->Then(
|
|
|
|
OwnerThread(), __func__,
|
2017-02-04 00:57:49 +03:00
|
|
|
[self] (MediaData::Type aType) {
|
|
|
|
self->mVideoWaitRequest.Complete();
|
2017-01-11 11:33:29 +03:00
|
|
|
MOZ_ASSERT(aType == MediaData::VIDEO_DATA);
|
2017-02-04 00:57:49 +03:00
|
|
|
self->mStateObj->HandleVideoWaited(aType);
|
2017-01-11 11:33:29 +03:00
|
|
|
},
|
2017-02-04 00:57:49 +03:00
|
|
|
[self] (const WaitForDataRejectValue& aRejection) {
|
|
|
|
self->mVideoWaitRequest.Complete();
|
|
|
|
self->DecodeError(NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA);
|
2017-01-11 11:33:29 +03:00
|
|
|
})->Track(mVideoWaitRequest);
|
2016-12-20 09:31:29 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-07-22 04:54:06 +03:00
|
|
|
void
|
2015-09-09 05:12:23 +03:00
|
|
|
MediaDecoderStateMachine::StartMediaSink()
|
2011-07-12 07:39:10 +04:00
|
|
|
{
|
2015-03-18 00:23:12 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2015-09-09 05:12:23 +03:00
|
|
|
if (!mMediaSink->IsStarted()) {
|
2013-12-11 09:03:30 +04:00
|
|
|
mAudioCompleted = false;
|
2017-03-28 11:16:56 +03:00
|
|
|
mMediaSink->Start(GetMediaTime(), Info());
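// Subscribe to the sink's end-of-track promises so we know when audio and
// video playback complete.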
|
2015-07-22 04:54:06 +03:00
|
|
|
|
2015-10-19 12:32:16 +03:00
|
|
|
auto videoPromise = mMediaSink->OnEnded(TrackInfo::kVideoTrack);
|
|
|
|
auto audioPromise = mMediaSink->OnEnded(TrackInfo::kAudioTrack);
|
|
|
|
|
|
|
|
if (audioPromise) {
|
2017-01-11 11:33:29 +03:00
|
|
|
audioPromise->Then(
|
2015-07-22 04:54:06 +03:00
|
|
|
OwnerThread(), __func__, this,
|
2015-10-19 12:32:16 +03:00
|
|
|
&MediaDecoderStateMachine::OnMediaSinkAudioComplete,
|
2017-01-11 11:33:29 +03:00
|
|
|
&MediaDecoderStateMachine::OnMediaSinkAudioError)
|
|
|
|
->Track(mMediaSinkAudioPromise);
|
2015-10-19 12:32:16 +03:00
|
|
|
}
|
|
|
|
if (videoPromise) {
|
2017-01-11 11:33:29 +03:00
|
|
|
videoPromise->Then(
|
2015-10-19 12:32:16 +03:00
|
|
|
OwnerThread(), __func__, this,
|
|
|
|
&MediaDecoderStateMachine::OnMediaSinkVideoComplete,
|
2017-01-11 11:33:29 +03:00
|
|
|
&MediaDecoderStateMachine::OnMediaSinkVideoError)
|
|
|
|
->Track(mMediaSinkVideoPromise);
|
2015-09-07 06:56:52 +03:00
|
|
|
}
|
2010-04-02 07:03:07 +04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-09-21 11:27:40 +03:00
|
|
|
bool
|
|
|
|
MediaDecoderStateMachine::HasLowDecodedAudio()
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2017-09-04 12:27:43 +03:00
|
|
|
return IsAudioDecoding() && GetDecodedAudioDuration()
|
|
|
|
< EXHAUSTED_DATA_MARGIN.MultDouble(mPlaybackRate);
|
2016-09-21 11:27:40 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
bool
|
|
|
|
MediaDecoderStateMachine::HasLowDecodedVideo()
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2017-09-04 12:27:43 +03:00
|
|
|
return IsVideoDecoding() &&
|
|
|
|
VideoQueue().GetSize() < LOW_VIDEO_FRAMES * mPlaybackRate;
|
2016-09-21 11:27:40 +03:00
|
|
|
}
|
|
|
|
|
2016-09-21 11:20:38 +03:00
|
|
|
bool
|
|
|
|
MediaDecoderStateMachine::HasLowDecodedData()
|
2011-01-18 03:53:18 +03:00
|
|
|
{
|
2015-04-30 03:07:55 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2016-04-20 09:45:41 +03:00
|
|
|
MOZ_ASSERT(mReader->UseBufferingHeuristics());
|
2016-09-21 11:27:40 +03:00
|
|
|
return HasLowDecodedAudio() || HasLowDecodedVideo();
|
2011-01-18 03:53:18 +03:00
|
|
|
}
|
2010-09-15 03:24:47 +04:00
|
|
|
|
2015-02-27 00:37:03 +03:00
|
|
|
bool MediaDecoderStateMachine::OutOfDecodedAudio()
|
|
|
|
{
|
2015-04-30 03:07:55 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2017-09-04 12:27:43 +03:00
|
|
|
return IsAudioDecoding() && !AudioQueue().IsFinished() &&
|
|
|
|
AudioQueue().GetSize() == 0 &&
|
|
|
|
!mMediaSink->HasUnplayedFrames(TrackInfo::kAudioTrack);
|
2015-02-27 00:37:03 +03:00
|
|
|
}
|
|
|
|
|
2017-03-28 10:09:30 +03:00
|
|
|
bool
|
|
|
|
MediaDecoderStateMachine::HasLowBufferedData()
|
2011-03-24 01:28:57 +03:00
|
|
|
{
|
2015-04-30 03:07:55 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2017-03-28 10:09:30 +03:00
|
|
|
return HasLowBufferedData(detail::LOW_BUFFER_THRESHOLD);
|
2011-03-24 01:28:57 +03:00
|
|
|
}
|
|
|
|
|
2017-03-28 10:09:30 +03:00
|
|
|
bool
|
|
|
|
MediaDecoderStateMachine::HasLowBufferedData(const TimeUnit& aThreshold)
|
2011-03-24 01:28:57 +03:00
|
|
|
{
|
2015-04-30 03:07:55 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2011-03-24 01:28:57 +03:00
|
|
|
|
2015-06-18 00:22:10 +03:00
|
|
|
// If the duration is infinite (e.g. a live stream), mBuffered is probably
|
2015-02-12 21:48:14 +03:00
|
|
|
// not going to have a useful buffered range. Return false here so that we
|
|
|
|
// don't get stuck in buffering mode for live streams.
|
2015-06-08 21:30:59 +03:00
|
|
|
if (Duration().IsInfinite()) {
|
2015-02-12 21:48:14 +03:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2015-06-18 00:22:10 +03:00
|
|
|
if (mBuffered.Ref().IsInvalid()) {
|
2015-05-18 09:15:47 +03:00
|
|
|
return false;
|
|
|
|
}
|
2014-11-05 14:57:43 +03:00
|
|
|
|
2016-01-27 10:12:49 +03:00
|
|
|
// We are never low in decoded data when we don't have audio/video or have
|
|
|
|
// decoded all audio/video samples.
|
2017-03-28 10:09:30 +03:00
|
|
|
TimeUnit endOfDecodedVideo = (HasVideo() && !VideoQueue().IsFinished())
|
2017-03-28 12:20:17 +03:00
|
|
|
? mDecodedVideoEndTime : TimeUnit::FromInfinity();
|
2017-03-28 10:09:30 +03:00
|
|
|
TimeUnit endOfDecodedAudio = (HasAudio() && !AudioQueue().IsFinished())
|
2017-03-28 12:20:17 +03:00
|
|
|
? mDecodedAudioEndTime : TimeUnit::FromInfinity();
|
2017-03-28 10:09:30 +03:00
|
|
|
|
|
|
|
auto endOfDecodedData = std::min(endOfDecodedVideo, endOfDecodedAudio);
|
|
|
|
if (Duration() < endOfDecodedData) {
|
2015-05-18 09:15:47 +03:00
|
|
|
// Our duration is not up to date. No point buffering.
|
|
|
|
return false;
|
|
|
|
}
|
2016-09-12 11:20:11 +03:00
|
|
|
|
2017-03-28 10:09:30 +03:00
|
|
|
if (endOfDecodedData.IsInfinite()) {
|
2016-09-12 11:20:11 +03:00
|
|
|
// Have decoded all samples. No point buffering.
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2017-03-28 10:09:30 +03:00
|
|
|
auto start = endOfDecodedData;
|
2017-03-28 11:16:56 +03:00
|
|
|
auto end = std::min(GetMediaTime() + aThreshold, Duration());
|
2016-09-28 17:43:49 +03:00
|
|
|
if (start >= end) {
|
|
|
|
// Duration of decoded samples is greater than our threshold.
|
|
|
|
return false;
|
|
|
|
}
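// We have low buffered data if the interval we are about to play (from the
// end of decoded data up to the threshold) is not fully buffered yet.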
|
2017-03-28 10:09:30 +03:00
|
|
|
media::TimeInterval interval(start, end);
|
2016-09-12 11:20:11 +03:00
|
|
|
return !mBuffered.Ref().Contains(interval);
|
2011-03-24 01:28:57 +03:00
|
|
|
}
|
|
|
|
|
2014-03-11 07:44:09 +04:00
|
|
|
void
|
2016-09-10 17:51:13 +03:00
|
|
|
MediaDecoderStateMachine::DecodeError(const MediaResult& aError)
|
2014-03-11 07:44:09 +04:00
|
|
|
{
|
2015-03-18 00:23:12 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2017-03-24 06:38:22 +03:00
|
|
|
LOGW("Decode error");
|
2016-08-26 08:41:45 +03:00
|
|
|
// Notify the decode error and MediaDecoder will shut down MDSM.
|
2016-09-10 17:51:13 +03:00
|
|
|
mOnPlaybackErrorEvent.Notify(aError);
|
2014-03-11 07:44:09 +04:00
|
|
|
}
|
|
|
|
|
2015-01-16 18:56:19 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::EnqueueFirstFrameLoadedEvent()
|
|
|
|
{
|
2015-04-07 22:11:41 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2016-03-09 05:32:49 +03:00
|
|
|
// Track the value of mSentFirstFrameLoadedEvent from before updating it.
|
|
|
|
bool firstFrameBeenLoaded = mSentFirstFrameLoadedEvent;
|
2015-01-16 18:56:19 +03:00
|
|
|
mSentFirstFrameLoadedEvent = true;
|
2016-11-25 11:22:55 +03:00
|
|
|
MediaDecoderEventVisibility visibility =
|
|
|
|
firstFrameBeenLoaded ? MediaDecoderEventVisibility::Suppressed
|
|
|
|
: MediaDecoderEventVisibility::Observable;
|
|
|
|
mFirstFrameLoadedEvent.Notify(
|
|
|
|
nsAutoPtr<MediaInfo>(new MediaInfo(Info())), visibility);
|
2014-11-06 12:52:44 +03:00
|
|
|
}
|
|
|
|
|
2015-07-25 11:25:13 +03:00
|
|
|
void
|
2014-11-06 12:52:44 +03:00
|
|
|
MediaDecoderStateMachine::FinishDecodeFirstFrame()
|
2014-06-18 09:07:02 +04:00
|
|
|
{
|
2015-03-18 00:23:12 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2016-08-31 07:54:50 +03:00
|
|
|
MOZ_ASSERT(!mSentFirstFrameLoadedEvent);
|
2017-03-24 06:33:06 +03:00
|
|
|
LOG("FinishDecodeFirstFrame");
|
2014-06-18 09:07:02 +04:00
|
|
|
|
2016-10-06 06:25:24 +03:00
|
|
|
mMediaSink->Redraw(Info().mVideo);
|
2014-06-18 09:07:02 +04:00
|
|
|
|
2017-08-17 06:52:29 +03:00
|
|
|
LOG("Media duration %" PRId64 ", mediaSeekable=%d",
|
|
|
|
Duration().ToMicroseconds(), mMediaSeekable);
|
2011-07-12 07:39:23 +04:00
|
|
|
|
2014-11-06 12:52:44 +03:00
|
|
|
// Get potentially updated metadata
|
2016-10-06 06:25:24 +03:00
|
|
|
mReader->ReadUpdatedMetadata(mInfo.ptr());
|
2014-11-06 12:52:44 +03:00
|
|
|
|
2016-08-30 11:48:09 +03:00
|
|
|
EnqueueFirstFrameLoadedEvent();
|
2011-07-12 07:39:23 +04:00
|
|
|
}
|
|
|
|
|
2015-12-03 10:59:44 +03:00
|
|
|
RefPtr<ShutdownPromise>
|
|
|
|
MediaDecoderStateMachine::BeginShutdown()
|
2015-09-07 14:32:16 +03:00
|
|
|
{
|
2015-12-03 10:59:44 +03:00
|
|
|
return InvokeAsync(OwnerThread(), this, __func__,
|
|
|
|
&MediaDecoderStateMachine::Shutdown);
|
2015-09-07 14:32:16 +03:00
|
|
|
}
|
|
|
|
|
2015-12-03 10:59:44 +03:00
|
|
|
RefPtr<ShutdownPromise>
|
2014-12-13 01:22:23 +03:00
|
|
|
MediaDecoderStateMachine::FinishShutdown()
|
2014-12-09 22:43:21 +03:00
|
|
|
{
|
2015-03-18 00:23:12 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2017-03-24 06:33:06 +03:00
|
|
|
LOG("Shutting down state machine task queue");
|
2015-12-03 10:59:44 +03:00
|
|
|
return OwnerThread()->BeginShutdown();
|
2014-12-09 22:43:21 +03:00
|
|
|
}
|
|
|
|
|
2016-08-31 10:46:59 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::RunStateMachine()
|
2010-04-02 07:03:07 +04:00
|
|
|
{
|
2015-03-18 00:23:12 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2015-03-10 19:59:30 +03:00
|
|
|
|
2015-03-18 00:23:12 +03:00
|
|
|
mDelayedScheduler.Reset(); // Must happen on state machine task queue.
|
2015-03-10 19:59:30 +03:00
|
|
|
mDispatchedStateMachine = false;
|
2016-09-06 06:42:28 +03:00
|
|
|
mStateObj->Step();
|
2016-09-05 10:48:37 +03:00
|
|
|
}
|
2011-07-12 07:39:37 +04:00
|
|
|
|
2014-06-30 03:23:04 +04:00
|
|
|
void
|
2016-12-22 11:56:59 +03:00
|
|
|
MediaDecoderStateMachine::ResetDecode(TrackSet aTracks)
|
2014-06-30 03:23:04 +04:00
|
|
|
{
|
2015-03-18 00:23:12 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2017-03-24 06:33:06 +03:00
|
|
|
LOG("MediaDecoderStateMachine::Reset");
|
2014-06-30 03:23:04 +04:00
|
|
|
|
2016-05-31 07:32:37 +03:00
|
|
|
// Assert that aTracks specifies to reset the video track because we
|
|
|
|
// don't currently support resetting just the audio track.
|
|
|
|
MOZ_ASSERT(aTracks.contains(TrackInfo::kVideoTrack));
|
2014-06-30 03:23:04 +04:00
|
|
|
|
2016-05-31 07:32:37 +03:00
|
|
|
if (aTracks.contains(TrackInfo::kVideoTrack)) {
|
2017-03-28 12:20:17 +03:00
|
|
|
mDecodedVideoEndTime = TimeUnit::Zero();
|
2016-05-31 07:32:37 +03:00
|
|
|
mVideoCompleted = false;
|
|
|
|
VideoQueue().Reset();
|
2016-12-20 08:17:05 +03:00
|
|
|
mVideoDataRequest.DisconnectIfExists();
|
2016-12-20 09:31:29 +03:00
|
|
|
mVideoWaitRequest.DisconnectIfExists();
|
2016-05-31 07:32:37 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
if (aTracks.contains(TrackInfo::kAudioTrack)) {
|
2017-03-28 12:20:17 +03:00
|
|
|
mDecodedAudioEndTime = TimeUnit::Zero();
|
2016-04-18 09:33:52 +03:00
|
|
|
mAudioCompleted = false;
|
|
|
|
AudioQueue().Reset();
|
2016-12-20 07:54:23 +03:00
|
|
|
mAudioDataRequest.DisconnectIfExists();
|
2016-12-20 09:31:29 +03:00
|
|
|
mAudioWaitRequest.DisconnectIfExists();
|
2016-04-18 09:33:52 +03:00
|
|
|
}
|
2015-02-17 18:50:49 +03:00
|
|
|
|
2015-09-15 05:05:19 +03:00
|
|
|
mPlaybackOffset = 0;
|
|
|
|
|
2016-05-31 07:32:37 +03:00
|
|
|
mReader->ResetDecode(aTracks);
|
2015-02-17 18:50:49 +03:00
|
|
|
}
|
|
|
|
|
2017-03-28 11:35:24 +03:00
|
|
|
media::TimeUnit
|
2015-09-10 05:19:42 +03:00
|
|
|
MediaDecoderStateMachine::GetClock(TimeStamp* aTimeStamp) const
|
2013-12-12 16:33:01 +04:00
|
|
|
{
|
2015-04-30 03:07:55 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2017-03-28 11:16:56 +03:00
|
|
|
auto clockTime = mMediaSink->GetPosition(aTimeStamp);
|
2015-09-10 05:19:42 +03:00
|
|
|
NS_ASSERTION(GetMediaTime() <= clockTime, "Clock should go forwards.");
|
2017-03-28 11:35:24 +03:00
|
|
|
return clockTime;
|
2012-11-22 14:38:28 +04:00
|
|
|
}
|
|
|
|
|
2015-10-19 13:08:11 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::UpdatePlaybackPositionPeriodically()
|
2012-11-22 14:38:28 +04:00
|
|
|
{
|
2015-03-18 00:23:12 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2011-03-24 01:28:57 +03:00
|
|
|
|
2016-04-27 07:51:57 +03:00
|
|
|
if (!IsPlaying()) {
|
2012-11-22 14:38:28 +04:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2015-03-03 05:43:38 +03:00
|
|
|
// Cap the current time to the larger of the audio and video end time.
|
|
|
|
// This ensures that if we're running off the system clock, we don't
|
|
|
|
// advance the clock to after the media end time.
|
2017-03-28 10:50:53 +03:00
|
|
|
if (VideoEndTime() > TimeUnit::Zero() || AudioEndTime() > TimeUnit::Zero()) {
|
2015-10-19 13:08:11 +03:00
|
|
|
|
2017-03-28 11:35:24 +03:00
|
|
|
const auto clockTime = GetClock();
|
2015-10-19 13:08:11 +03:00
|
|
|
// Skip frames up to the frame at the playback position, and figure out
|
|
|
|
// the time remaining until it's time to display the next frame and drop
|
|
|
|
// the current frame.
|
2017-03-28 11:16:56 +03:00
|
|
|
NS_ASSERTION(clockTime >= TimeUnit::Zero(), "Should have positive clock time.");
|
2015-10-19 13:08:11 +03:00
|
|
|
|
2017-01-27 15:20:37 +03:00
|
|
|
// These will be non-zero if we've displayed a video frame or played an audio
|
|
|
|
// frame.
|
2017-03-28 10:50:53 +03:00
|
|
|
auto maxEndTime = std::max(VideoEndTime(), AudioEndTime());
|
2017-03-28 11:16:56 +03:00
|
|
|
auto t = std::min(clockTime, maxEndTime);
|
2015-03-03 05:43:38 +03:00
|
|
|
// FIXME: Bug 1091422 - chained ogg files hit this assertion.
|
|
|
|
//MOZ_ASSERT(t >= GetMediaTime());
|
|
|
|
if (t > GetMediaTime()) {
|
2017-03-28 11:16:56 +03:00
|
|
|
UpdatePlaybackPosition(t);
|
2015-03-03 05:43:38 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
// Note we have to update playback position before releasing the monitor.
|
|
|
|
// Otherwise, MediaDecoder::AddOutputStream could kick in when we are outside
|
|
|
|
// the monitor and get a stale value from GetCurrentTimeUs(), which hits the
|
|
|
|
// assertion in GetClock().
|
|
|
|
|
2015-10-19 13:08:11 +03:00
|
|
|
int64_t delay = std::max<int64_t>(1, AUDIO_DURATION_USECS / mPlaybackRate);
|
2017-03-28 12:10:59 +03:00
|
|
|
ScheduleStateMachineIn(TimeUnit::FromMicroseconds(delay));
|
2010-04-02 07:03:07 +04:00
|
|
|
}
|
|
|
|
|
2016-11-03 12:09:10 +03:00
|
|
|
/* static */ const char*
|
|
|
|
MediaDecoderStateMachine::ToStr(NextFrameStatus aStatus)
|
2015-04-30 03:07:55 +03:00
|
|
|
{
|
2016-11-03 12:09:10 +03:00
|
|
|
switch (aStatus) {
|
2017-01-27 15:20:37 +03:00
|
|
|
case MediaDecoderOwner::NEXT_FRAME_AVAILABLE:
|
|
|
|
return "NEXT_FRAME_AVAILABLE";
|
|
|
|
case MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE:
|
|
|
|
return "NEXT_FRAME_UNAVAILABLE";
|
|
|
|
case MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_BUFFERING:
|
|
|
|
return "NEXT_FRAME_UNAVAILABLE_BUFFERING";
|
|
|
|
case MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_SEEKING:
|
|
|
|
return "NEXT_FRAME_UNAVAILABLE_SEEKING";
|
|
|
|
case MediaDecoderOwner::NEXT_FRAME_UNINITIALIZED:
|
|
|
|
return "NEXT_FRAME_UNINITIALIZED";
|
2012-12-19 08:48:32 +04:00
|
|
|
}
|
2016-11-03 12:09:10 +03:00
|
|
|
return "UNKNOWN";
|
|
|
|
}
|
2015-04-01 04:44:36 +03:00
|
|
|
|
2016-11-03 12:09:10 +03:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::UpdateNextFrameStatus(NextFrameStatus aStatus)
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
|
|
|
if (aStatus != mNextFrameStatus) {
|
2017-03-24 06:33:06 +03:00
|
|
|
LOG("Changed mNextFrameStatus to %s", ToStr(aStatus));
|
2016-11-03 12:09:10 +03:00
|
|
|
mNextFrameStatus = aStatus;
|
2015-04-01 04:44:36 +03:00
|
|
|
}
|
2016-11-03 12:09:10 +03:00
|
|
|
}
|
2015-04-01 04:44:36 +03:00
|
|
|
|
2015-03-10 19:59:30 +03:00
|
|
|
void
|
2015-05-06 07:32:33 +03:00
|
|
|
MediaDecoderStateMachine::ScheduleStateMachine()
|
|
|
|
{
|
2015-05-06 04:17:51 +03:00
|
|
|
MOZ_ASSERT(OnTaskQueue());
|
2015-03-10 19:59:30 +03:00
|
|
|
if (mDispatchedStateMachine) {
|
|
|
|
return;
|
2012-12-04 14:59:36 +04:00
|
|
|
}
|
2015-03-10 19:59:30 +03:00
|
|
|
mDispatchedStateMachine = true;
|
2012-12-04 14:59:36 +04:00
|
|
|
|
2017-01-27 15:20:37 +03:00
|
|
|
OwnerThread()->Dispatch(
|
2017-06-12 22:34:10 +03:00
|
|
|
NewRunnableMethod("MediaDecoderStateMachine::RunStateMachine",
|
|
|
|
this,
|
|
|
|
&MediaDecoderStateMachine::RunStateMachine));
|
2011-07-12 07:39:34 +04:00
|
|
|
}
void
MediaDecoderStateMachine::ScheduleStateMachineIn(const TimeUnit& aTime)
{
  MOZ_ASSERT(OnTaskQueue()); // mDelayedScheduler.Ensure() may Disconnect()
                             // the promise, which must happen on the state
                             // machine task queue.
  MOZ_ASSERT(aTime > TimeUnit::Zero());
  if (mDispatchedStateMachine) {
    return;
  }

  TimeStamp target = TimeStamp::Now() + aTime.ToTimeDuration();

  // It is OK to capture 'this' without causing UAF because the callback
  // always happens before shutdown.
  RefPtr<MediaDecoderStateMachine> self = this;
  mDelayedScheduler.Ensure(target, [self] () {
    self->mDelayedScheduler.CompleteRequest();
    self->RunStateMachine();
  }, [] () {
    MOZ_DIAGNOSTIC_ASSERT(false);
  });
}

bool MediaDecoderStateMachine::OnTaskQueue() const
{
  return OwnerThread()->IsCurrentThreadIn();
}

bool MediaDecoderStateMachine::IsStateMachineScheduled() const
{
  MOZ_ASSERT(OnTaskQueue());
  return mDispatchedStateMachine || mDelayedScheduler.IsScheduled();
}

void
MediaDecoderStateMachine::SetPlaybackRate(double aPlaybackRate)
{
  MOZ_ASSERT(OnTaskQueue());
  MOZ_ASSERT(aPlaybackRate != 0, "Should be handled by MediaDecoder::Pause()");

  mPlaybackRate = aPlaybackRate;
  mMediaSink->SetPlaybackRate(mPlaybackRate);

  // Schedule next cycle to check if we can stop prerolling.
  ScheduleStateMachine();
}

void MediaDecoderStateMachine::PreservesPitchChanged()
{
  MOZ_ASSERT(OnTaskQueue());
  mMediaSink->SetPreservesPitch(mPreservesPitch);
}
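
// AudioEndTime/VideoEndTime report how far the media sink has consumed each
// track, falling back to the current media time before the sink is started.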
TimeUnit
MediaDecoderStateMachine::AudioEndTime() const
{
  MOZ_ASSERT(OnTaskQueue());
  if (mMediaSink->IsStarted()) {
    return mMediaSink->GetEndTime(TrackInfo::kAudioTrack);
  }
  return GetMediaTime();
}

TimeUnit
MediaDecoderStateMachine::VideoEndTime() const
{
  MOZ_ASSERT(OnTaskQueue());
  if (mMediaSink->IsStarted()) {
    return mMediaSink->GetEndTime(TrackInfo::kVideoTrack);
  }
  return GetMediaTime();
}

void
MediaDecoderStateMachine::OnMediaSinkVideoComplete()
{
  MOZ_ASSERT(OnTaskQueue());
  MOZ_ASSERT(HasVideo());
  LOG("[%s]", __func__);

  mMediaSinkVideoPromise.Complete();
  mVideoCompleted = true;
  ScheduleStateMachine();
}
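
// The video sink reported an error. Mark video as complete and keep playing
// the audio track if there is one; without audio the stream is unplayable, so
// raise a decode error in that case.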
void
MediaDecoderStateMachine::OnMediaSinkVideoError()
{
  MOZ_ASSERT(OnTaskQueue());
  MOZ_ASSERT(HasVideo());
  LOGW("[%s]", __func__);

  mMediaSinkVideoPromise.Complete();
  mVideoCompleted = true;
  if (HasAudio()) {
    return;
  }
  DecodeError(MediaResult(NS_ERROR_DOM_MEDIA_MEDIASINK_ERR, __func__));
}

void MediaDecoderStateMachine::OnMediaSinkAudioComplete()
{
  MOZ_ASSERT(OnTaskQueue());
  MOZ_ASSERT(HasAudio());
  LOG("[%s]", __func__);

  mMediaSinkAudioPromise.Complete();
  mAudioCompleted = true;
  // To notify PlaybackEnded as soon as possible.
  ScheduleStateMachine();

  // Report OK to Decoder Doctor (to know if issue may have been resolved).
  mOnDecoderDoctorEvent.Notify(
    DecoderDoctorEvent{DecoderDoctorEvent::eAudioSinkStartup, NS_OK});
}

void MediaDecoderStateMachine::OnMediaSinkAudioError(nsresult aResult)
{
  MOZ_ASSERT(OnTaskQueue());
  MOZ_ASSERT(HasAudio());
  LOGW("[%s]", __func__);

  mMediaSinkAudioPromise.Complete();
  mAudioCompleted = true;

  // Result should never be NS_OK in this *error* handler. Report to Dec-Doc.
  MOZ_ASSERT(NS_FAILED(aResult));
  mOnDecoderDoctorEvent.Notify(
    DecoderDoctorEvent{DecoderDoctorEvent::eAudioSinkStartup, aResult});

  // Make the best effort to continue playback when there is video.
  if (HasVideo()) {
    return;
  }

  // Otherwise notify the media decoder/element about this error because it
  // makes no sense to play an audio-only file without sound output.
  DecodeError(MediaResult(NS_ERROR_DOM_MEDIA_MEDIASINK_ERR, __func__));
}
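
// Switch media sinks when audio capture is toggled. Playback parameters are
// carried over to the new sink, and the completion flags are reset because
// the new sink starts from scratch.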
void
MediaDecoderStateMachine::SetAudioCaptured(bool aCaptured)
{
  MOZ_ASSERT(OnTaskQueue());

  if (aCaptured == mAudioCaptured) {
    return;
  }

  // Reset these flags so they are consistent with the status of the sink.
  // TODO: Move these flags into MediaSink to improve cohesion so we don't need
  // to reset these flags when switching MediaSinks.
  mAudioCompleted = false;
  mVideoCompleted = false;

  // Backup current playback parameters.
  MediaSink::PlaybackParams params = mMediaSink->GetPlaybackParams();

  // Stop and shut down the existing sink.
  StopMediaSink();
  mMediaSink->Shutdown();

  // Create a new sink according to whether audio is captured.
  mMediaSink = CreateMediaSink(aCaptured);

  // Restore playback parameters.
  mMediaSink->SetPlaybackParams(params);

  mAudioCaptured = aCaptured;

  // Don't buffer as much when audio is captured because we don't need to worry
  // about high latency audio devices.
  mAmpleAudioThreshold = mAudioCaptured
    ? detail::AMPLE_AUDIO_THRESHOLD / 2 : detail::AMPLE_AUDIO_THRESHOLD;

  mStateObj->HandleAudioCaptured();
}
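
// Number of queued decoded video frames considered "ample", chosen from prefs
// depending on whether video decoding is hardware accelerated and clamped to
// at least MIN_VIDEO_QUEUE_SIZE.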
uint32_t MediaDecoderStateMachine::GetAmpleVideoFrames() const
{
  MOZ_ASSERT(OnTaskQueue());
  return mReader->VideoIsHardwareAccelerated()
         ? std::max<uint32_t>(sVideoQueueHWAccelSize, MIN_VIDEO_QUEUE_SIZE)
         : std::max<uint32_t>(sVideoQueueDefaultSize, MIN_VIDEO_QUEUE_SIZE);
}

nsCString
MediaDecoderStateMachine::GetDebugInfo()
{
  MOZ_ASSERT(OnTaskQueue());
  return nsPrintfCString(
    "MediaDecoderStateMachine State: GetMediaTime=%" PRId64 " GetClock="
    "%" PRId64 " mMediaSink=%p state=%s mPlayState=%d "
    "mSentFirstFrameLoadedEvent=%d IsPlaying=%d mAudioStatus=%s "
    "mVideoStatus=%s mDecodedAudioEndTime=%" PRId64
    " mDecodedVideoEndTime=%" PRId64 " mAudioCompleted=%d "
    "mVideoCompleted=%d",
    GetMediaTime().ToMicroseconds(),
    mMediaSink->IsStarted() ? GetClock().ToMicroseconds() : -1,
    mMediaSink.get(), ToStateStr(), mPlayState.Ref(),
    mSentFirstFrameLoadedEvent, IsPlaying(), AudioRequestStatus(),
    VideoRequestStatus(), mDecodedAudioEndTime.ToMicroseconds(),
    mDecodedVideoEndTime.ToMicroseconds(),
    mAudioCompleted, mVideoCompleted)
    + mStateObj->GetDebugInfo() + nsCString("\n")
    + mMediaSink->GetDebugInfo();
}
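
// GetDebugInfo() must run on the task queue, so dispatch a runnable that
// gathers it there and resolve the returned promise with the result.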
RefPtr<MediaDecoder::DebugInfoPromise>
MediaDecoderStateMachine::RequestDebugInfo()
{
  using PromiseType = MediaDecoder::DebugInfoPromise;
  RefPtr<PromiseType::Private> p = new PromiseType::Private(__func__);
  RefPtr<MediaDecoderStateMachine> self = this;
  OwnerThread()->Dispatch(
    NS_NewRunnableFunction(
      "MediaDecoderStateMachine::RequestDebugInfo",
      [self, p]() { p->Resolve(self->GetDebugInfo(), __func__); }),
    AbstractThread::AssertDispatchSuccess,
    AbstractThread::TailDispatch);
  return p.forget();
}
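
// Output streams are managed on the main thread. Adding a stream turns on
// audio capture via a task-queue dispatch; removing the last stream turns it
// back off.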
void MediaDecoderStateMachine::AddOutputStream(ProcessedMediaStream* aStream,
                                               bool aFinishWhenEnded)
{
  MOZ_ASSERT(NS_IsMainThread());
  LOG("AddOutputStream aStream=%p!", aStream);
  mOutputStreamManager->Add(aStream, aFinishWhenEnded);
  nsCOMPtr<nsIRunnable> r =
    NewRunnableMethod<bool>("MediaDecoderStateMachine::SetAudioCaptured",
                            this,
                            &MediaDecoderStateMachine::SetAudioCaptured,
                            true);
  OwnerThread()->Dispatch(r.forget());
}

void MediaDecoderStateMachine::RemoveOutputStream(MediaStream* aStream)
{
  MOZ_ASSERT(NS_IsMainThread());
  LOG("RemoveOutputStream=%p!", aStream);
  mOutputStreamManager->Remove(aStream);
  if (mOutputStreamManager->IsEmpty()) {
    nsCOMPtr<nsIRunnable> r =
      NewRunnableMethod<bool>("MediaDecoderStateMachine::SetAudioCaptured",
                              this,
                              &MediaDecoderStateMachine::SetAudioCaptured,
                              false);
    OwnerThread()->Dispatch(r.forget());
  }
}
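
// Functors run over the decoded frame queues to total the heap memory used by
// the queued frames; see SizeOfVideoQueue() / SizeOfAudioQueue() below.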
class VideoQueueMemoryFunctor : public nsDequeFunctor
{
public:
  VideoQueueMemoryFunctor()
    : mSize(0)
  {
  }

  MOZ_DEFINE_MALLOC_SIZE_OF(MallocSizeOf);

  virtual void operator()(void* aObject)
  {
    const VideoData* v = static_cast<const VideoData*>(aObject);
    mSize += v->SizeOfIncludingThis(MallocSizeOf);
  }

  size_t mSize;
};

class AudioQueueMemoryFunctor : public nsDequeFunctor
{
public:
  AudioQueueMemoryFunctor()
    : mSize(0)
  {
  }

  MOZ_DEFINE_MALLOC_SIZE_OF(MallocSizeOf);

  virtual void operator()(void* aObject)
  {
    const AudioData* audioData = static_cast<const AudioData*>(aObject);
    mSize += audioData->SizeOfIncludingThis(MallocSizeOf);
  }

  size_t mSize;
};

size_t
MediaDecoderStateMachine::SizeOfVideoQueue() const
{
  VideoQueueMemoryFunctor functor;
  mVideoQueue.LockedForEach(functor);
  return functor.mSize;
}

size_t
MediaDecoderStateMachine::SizeOfAudioQueue() const
{
  AudioQueueMemoryFunctor functor;
  mAudioQueue.LockedForEach(functor);
  return functor.mSize;
}

AbstractCanonical<media::TimeIntervals>*
MediaDecoderStateMachine::CanonicalBuffered() const
{
  return mReader->CanonicalBuffered();
}

MediaEventSource<void>&
MediaDecoderStateMachine::OnMediaNotSeekable() const
{
  return mReader->OnMediaNotSeekable();
}
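
// Human-readable snapshots of the outstanding decode requests, used by
// GetDebugInfo(): "pending" while a request is in flight, "waiting" while
// waiting for more data, otherwise "idle".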
const char*
MediaDecoderStateMachine::AudioRequestStatus() const
{
  MOZ_ASSERT(OnTaskQueue());
  if (IsRequestingAudioData()) {
    MOZ_DIAGNOSTIC_ASSERT(!IsWaitingAudioData());
    return "pending";
  } else if (IsWaitingAudioData()) {
    return "waiting";
  }
  return "idle";
}

const char*
MediaDecoderStateMachine::VideoRequestStatus() const
{
  MOZ_ASSERT(OnTaskQueue());
  if (IsRequestingVideoData()) {
    MOZ_DIAGNOSTIC_ASSERT(!IsWaitingVideoData());
    return "pending";
  } else if (IsWaitingVideoData()) {
    return "waiting";
  }
  return "idle";
}

void
MediaDecoderStateMachine::OnSuspendTimerResolved()
{
  LOG("OnSuspendTimerResolved");
  mVideoDecodeSuspendTimer.CompleteRequest();
  mStateObj->HandleVideoSuspendTimeout();
}

void
MediaDecoderStateMachine::CancelSuspendTimer()
{
  LOG("CancelSuspendTimer: State: %s, Timer.IsScheduled: %c",
      ToStateStr(mStateObj->GetState()),
      mVideoDecodeSuspendTimer.IsScheduled() ? 'T' : 'F');
  MOZ_ASSERT(OnTaskQueue());
  if (mVideoDecodeSuspendTimer.IsScheduled()) {
    mOnPlaybackEvent.Notify(MediaEventType::CancelVideoSuspendTimer);
  }
  mVideoDecodeSuspendTimer.Reset();
}

} // namespace mozilla

// avoid redefined macro in unified build
#undef LOG
#undef LOGV
#undef LOGW
#undef SLOGW
#undef NS_DispatchToMainThread