Mirror of https://github.com/mozilla/pjs.git

Bug 580531 - Implement HTMLMediaElement.mozPaintedFrames and mozFrameDelay. r=roc

Parent: dc460d5e2d
Commit: fa6037c7a1
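Summary of the change, as reflected in the diff below: RenderVideoFrame and nsMediaDecoder::SetVideoData gain a TimeStamp aTarget parameter carrying the time at which the frame being handed over should be painted. nsMediaDecoder stores that target in mPaintTarget and, when the next frame arrives, computes mPaintDelay as the difference between the ImageContainer's actual paint time and the stored target. The painted-frame count and the delay are exposed to content through the new nsIDOMHTMLVideoElement attributes mozPaintedFrames and mozFrameDelay.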
@@ -204,3 +204,16 @@ NS_IMETHODIMP nsHTMLVideoElement::GetMozPresentedFrames(PRUint32 *aMozPresentedFrames)
   *aMozPresentedFrames = mDecoder ? mDecoder->GetFrameStatistics().GetPresentedFrames() : 0;
   return NS_OK;
 }
+
+NS_IMETHODIMP nsHTMLVideoElement::GetMozPaintedFrames(PRUint32 *aMozPaintedFrames)
+{
+  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
+  *aMozPaintedFrames = (!mDecoder || !GetImageContainer()) ? 0 : GetImageContainer()->GetPaintCount();
+  return NS_OK;
+}
+
+NS_IMETHODIMP nsHTMLVideoElement::GetMozFrameDelay(double *aMozFrameDelay) {
+  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
+  *aMozFrameDelay = mDecoder ? mDecoder->GetFrameDelay() : 0;
+  return NS_OK;
+}
@@ -985,7 +985,7 @@ nsresult nsBuiltinDecoderStateMachine::Run()
         VideoData* videoData = FindStartTime();
         if (videoData) {
           MonitorAutoExit exitMon(mDecoder->GetMonitor());
-          RenderVideoFrame(videoData);
+          RenderVideoFrame(videoData, TimeStamp::Now());
         }

         // Start the decode threads, so that we can pre buffer the streams.
@@ -1107,7 +1107,7 @@ nsresult nsBuiltinDecoderStateMachine::Run()
         if (video) {
           NS_ASSERTION(video->mTime <= seekTime && seekTime <= video->mEndTime,
                        "Seek target should lie inside the first frame after seek");
-          RenderVideoFrame(video);
+          RenderVideoFrame(video, TimeStamp::Now());
           mReader->mVideoQueue.PopFront();
           nsCOMPtr<nsIRunnable> event =
             NS_NewRunnableMethod(mDecoder, &nsBuiltinDecoder::Invalidate);
@@ -1270,7 +1270,8 @@ nsresult nsBuiltinDecoderStateMachine::Run()
   return NS_OK;
 }

-void nsBuiltinDecoderStateMachine::RenderVideoFrame(VideoData* aData)
+void nsBuiltinDecoderStateMachine::RenderVideoFrame(VideoData* aData,
+                                                    TimeStamp aTarget)
 {
   NS_ASSERTION(IsCurrentThread(mDecoder->mStateMachineThread), "Should be on state machine thread.");

@@ -1281,7 +1282,10 @@ void nsBuiltinDecoderStateMachine::RenderVideoFrame(VideoData* aData)
   nsRefPtr<Image> image = aData->mImage;
   if (image) {
     const nsVideoInfo& info = mReader->GetInfo();
-    mDecoder->SetVideoData(gfxIntSize(info.mDisplay.width, info.mDisplay.height), info.mPixelAspectRatio, image);
+    mDecoder->SetVideoData(gfxIntSize(info.mDisplay.width, info.mDisplay.height),
+                           info.mPixelAspectRatio,
+                           image,
+                           aTarget);
   }
 }

@@ -1362,12 +1366,14 @@ void nsBuiltinDecoderStateMachine::AdvanceFrame()

     if (currentFrame) {
       // Decode one frame and display it
+      TimeStamp presTime = mPlayStartTime - mPlayDuration +
+        TimeDuration::FromMilliseconds(currentFrame->mTime - mStartTime);
       NS_ASSERTION(currentFrame->mTime >= mStartTime, "Should have positive frame time");
       {
         MonitorAutoExit exitMon(mDecoder->GetMonitor());
         // If we have video, we want to increment the clock in steps of the frame
         // duration.
-        RenderVideoFrame(currentFrame);
+        RenderVideoFrame(currentFrame, presTime);
       }
       mDecoder->GetFrameStatistics().NotifyPresentedFrame();
       PRInt64 now = (TimeStamp::Now() - mPlayStartTime + mPlayDuration).ToMilliseconds();
@@ -308,7 +308,8 @@ protected:

   // Performs YCbCr to RGB conversion, and pushes the image down the
   // rendering pipeline. Called on the state machine thread.
-  void RenderVideoFrame(VideoData* aData);
+  void RenderVideoFrame(VideoData* aData,
+                        TimeStamp aTarget);

   // If we have video, display a video frame if it's time for display has
   // arrived, otherwise sleep until it's time for the next sample. Update
@@ -255,7 +255,8 @@ void nsMediaDecoder::FireTimeUpdate()

 void nsMediaDecoder::SetVideoData(const gfxIntSize& aSize,
                                   float aPixelAspectRatio,
-                                  Image* aImage)
+                                  Image* aImage,
+                                  TimeStamp aTarget)
 {
   nsAutoLock lock(mVideoUpdateLock);

@@ -268,12 +269,26 @@ void nsMediaDecoder::SetVideoData(const gfxIntSize& aSize,
   }
   if (mImageContainer && aImage) {
     gfxIntSize oldFrameSize = mImageContainer->GetCurrentSize();
+
+    TimeStamp paintTime = mImageContainer->GetPaintTime();
+    if (!paintTime.IsNull() && !mPaintTarget.IsNull()) {
+      mPaintDelay = paintTime - mPaintTarget;
+    }
+
     mImageContainer->SetCurrentImage(aImage);
     gfxIntSize newFrameSize = mImageContainer->GetCurrentSize();
     if (oldFrameSize != newFrameSize) {
       mImageContainerSizeChanged = PR_TRUE;
     }
   }
+
+  mPaintTarget = aTarget;
 }

+double nsMediaDecoder::GetFrameDelay()
+{
+  nsAutoLock lock(mVideoUpdateLock);
+  return mPaintDelay.ToSeconds();
+}
+
 void nsMediaDecoder::PinForSeek()
@@ -266,6 +266,11 @@ public:
     PRUint32& mDecoded;
   };

+  // Time in seconds by which the last painted video frame was late by.
+  // E.g. if the last painted frame should have been painted at time t,
+  // but was actually painted at t+n, this returns n in seconds. Threadsafe.
+  double GetFrameDelay();
+
   // Return statistics. This is used for progress events and other things.
   // This can be called from any thread. It's only a snapshot of the
   // current state, since other threads might be changing the state
@@ -361,11 +366,13 @@ public:
   // thread; ImageContainers can be used from any thread.
   ImageContainer* GetImageContainer() { return mImageContainer; }

-  // Set the video width, height, pixel aspect ratio, and current image.
-  // Ownership of the image is transferred to the decoder.
+  // Set the video width, height, pixel aspect ratio, current image and
+  // target paint time of the next video frame to be displayed.
+  // Ownership of the image is transferred to the layers subsystem.
   void SetVideoData(const gfxIntSize& aSize,
                     float aPixelAspectRatio,
-                    Image* aImage);
+                    Image* aImage,
+                    TimeStamp aTarget);

   // Constructs the time ranges representing what segments of the media
   // are buffered and playable.
@@ -408,6 +415,15 @@ protected:
   // Counters related to decode and presentation of frames.
   FrameStatistics mFrameStats;

+  // The time at which the current video frame should have been painted.
+  // Access protected by mVideoUpdateLock.
+  TimeStamp mPaintTarget;
+
+  // The delay between the last video frame being presented and it being
+  // painted. This is time elapsed after mPaintTarget until the most recently
+  // painted frame appeared on screen. Access protected by mVideoUpdateLock.
+  TimeDuration mPaintDelay;
+
   nsRefPtr<ImageContainer> mImageContainer;

   // Time that the last progress event was fired. Read/Write from the
@@ -43,6 +43,7 @@
 #include "VideoUtils.h"
 #include "theora/theoradec.h"
 #include "nsTimeRanges.h"
+#include "mozilla/TimeStamp.h"

 using namespace mozilla;

@@ -290,7 +291,7 @@ nsresult nsOggReader::ReadMetadata()
     if (mTheoraState && mTheoraState->Init()) {
       gfxIntSize sz(mTheoraState->mInfo.pic_width,
                     mTheoraState->mInfo.pic_height);
-      mDecoder->SetVideoData(sz, mTheoraState->mPixelAspectRatio, nsnull);
+      mDecoder->SetVideoData(sz, mTheoraState->mPixelAspectRatio, nsnull, TimeStamp::Now());
     }
     if (mVorbisState) {
       mVorbisState->Init();
@@ -48,7 +48,7 @@
  * @status UNDER_DEVELOPMENT
  */

-[scriptable, uuid(8a2e5756-e9a3-404f-9592-4e5b5830de85)]
+[scriptable, uuid(e1f52aa5-9962-4019-b7b3-af3aee6e4d48)]
 interface nsIDOMHTMLVideoElement : nsIDOMHTMLMediaElement
 {
           attribute long width;
@@ -70,5 +70,10 @@ interface nsIDOMHTMLVideoElement : nsIDOMHTMLMediaElement
   // pipeline. We may drop frames if they arrive late at the renderer.
   readonly attribute unsigned long mozPresentedFrames;

+  // Number of presented frames which were painted on screen.
+  readonly attribute unsigned long mozPaintedFrames;
+
+  // Time which the last painted video frame was late by, in seconds.
+  readonly attribute double mozFrameDelay;
 };

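As a usage sketch (not part of this patch), a page could poll the new attributes together with the pre-existing mozPresentedFrames to gauge how well playback is keeping up. The element id, the one-second polling interval, and the log format below are arbitrary assumptions:

// Hypothetical example: poll the moz* video statistics once per second.
var video = document.getElementById("v");
setInterval(function () {
  var presented = video.mozPresentedFrames; // frames presented to the rendering pipeline
  var painted   = video.mozPaintedFrames;   // presented frames that were actually painted
  var delay     = video.mozFrameDelay;      // seconds the last painted frame was late by
  if (presented > 0) {
    console.log("painted " + (100 * painted / presented).toFixed(1) +
                "% of presented frames; last frame late by " +
                (1000 * delay).toFixed(1) + " ms");
  }
}, 1000);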