Bug 1464268 - Use timeslice to gather blobs in MediaEncoder. r=bryce

This brings back dataavailable events that are based on the timeslice interval
to MediaRecorder.

Prior to this stack we used wall-clock time to decide whether to fire
dataavailable events. The spec says "once a minimum of timeslice
milliseconds of data have been collected", meaning the old behavior was wrong
as no guarantee could be given to how much data had been collected, and
especially muxed, for a given duration of wall-clock time.

With this patch the responsibility of triggering dataavailable events lies with
MediaEncoder, which also knows the timeslice. This means it can issue blobs
precisely when they contain enough data to fill the timeslice.

Buffering in the ContainerWriter remains a problem that can, in the worst
case, result in empty blobs, because the timeslice logic is based on the input
to the muxer — where timestamps are still known — rather than on its buffered
output.

Differential Revision: https://phabricator.services.mozilla.com/D95724
This commit is contained in:
Andreas Pehrson 2021-02-11 13:38:59 +00:00
Parent: d8073408f9
Commit: 535f34ddd6
3 changed files with 119 additions and 5 deletions

View file

@ -932,6 +932,9 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
Unused << rv;
mDataAvailableListener = mEncoder->DataAvailableEvent().Connect(
mMainThread, this, &Session::OnDataAvailable);
if (mRecorder->mAudioNode) {
mEncoder->ConnectAudioNode(mRecorder->mAudioNode,
mRecorder->mAudioNodeOutput);
@ -1073,6 +1076,18 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
}));
}
void OnDataAvailable(const RefPtr<BlobImpl>& aBlob) {
if (mRunningState.isErr() &&
mRunningState.unwrapErr() == NS_ERROR_DOM_SECURITY_ERR) {
return;
}
if (NS_WARN_IF(NS_FAILED(mRecorder->CreateAndDispatchBlobEvent(aBlob)))) {
LOG(LogLevel::Warning,
("MediaRecorder %p Creating or dispatching BlobEvent failed", this));
DoSessionEndTask(NS_OK);
}
}
void MediaEncoderError() {
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
NS_DispatchToMainThread(NewRunnableMethod<nsresult>(
@ -1125,7 +1140,10 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
return ShutdownPromise::CreateAndResolve(true, __func__);
})
->Then(mMainThread, __func__,
[encoder = mEncoder] { return encoder->Cancel(); })
[this, self = RefPtr<Session>(this)] {
mDataAvailableListener.DisconnectIfExists();
return mEncoder->Cancel();
})
->Then(mEncoderThread, __func__, [] {
// Meh, this is just to convert the promise type to match
// mShutdownPromise.
@ -1200,6 +1218,8 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
RefPtr<MediaEncoder> mEncoder;
// Listener through which MediaEncoder signals us.
RefPtr<EncoderListener> mEncoderListener;
// Listener connected to mMediaEncoder::DataAvailableEvent().
MediaEventListener mDataAvailableListener;
// Set in Shutdown() and resolved when shutdown is complete.
RefPtr<ShutdownPromise> mShutdownPromise;
// Session mimeType

View file

@ -437,6 +437,8 @@ MediaEncoder::MediaEncoder(
mCompleted(false),
mError(false) {
if (mAudioEncoder) {
mAudioPushListener = mEncodedAudioQueue->PushEvent().Connect(
mEncoderThread, this, &MediaEncoder::OnEncodedAudioPushed);
mAudioFinishListener = mEncodedAudioQueue->FinishEvent().Connect(
mEncoderThread, this, &MediaEncoder::MaybeShutdown);
nsresult rv = mEncoderThread->Dispatch(NS_NewRunnableFunction(
@ -447,9 +449,12 @@ MediaEncoder::MediaEncoder(
MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
Unused << rv;
} else {
mMuxedAudioEndTime = TimeUnit::FromInfinity();
mEncodedAudioQueue->Finish();
}
if (mVideoEncoder) {
mVideoPushListener = mEncodedVideoQueue->PushEvent().Connect(
mEncoderThread, this, &MediaEncoder::OnEncodedVideoPushed);
mVideoFinishListener = mEncodedVideoQueue->FinishEvent().Connect(
mEncoderThread, this, &MediaEncoder::MaybeShutdown);
nsresult rv = mEncoderThread->Dispatch(NS_NewRunnableFunction(
@ -460,6 +465,7 @@ MediaEncoder::MediaEncoder(
MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
Unused << rv;
} else {
mMuxedVideoEndTime = TimeUnit::FromInfinity();
mEncodedVideoQueue->Finish();
}
}
@ -824,7 +830,9 @@ RefPtr<GenericNonExclusivePromise> MediaEncoder::Shutdown() {
mShutdownPromise->Then(mEncoderThread, __func__,
[self = RefPtr<MediaEncoder>(this), this] {
mMuxer->Disconnect();
mAudioPushListener.DisconnectIfExists();
mAudioFinishListener.DisconnectIfExists();
mVideoPushListener.DisconnectIfExists();
mVideoFinishListener.DisconnectIfExists();
});
@ -884,6 +892,10 @@ void MediaEncoder::SetError() {
}
auto MediaEncoder::RequestData() -> RefPtr<BlobPromise> {
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
TimeUnit muxedEndTime = std::min(mMuxedAudioEndTime, mMuxedVideoEndTime);
mLastBlobTime = muxedEndTime;
mLastExtractTime = muxedEndTime;
return Extract()->Then(
mMainThread, __func__,
[this, self = RefPtr<MediaEncoder>(this)](
@ -903,6 +915,49 @@ void MediaEncoder::MaybeCreateMutableBlobStorage() {
}
}
// Encoder-thread callback fired for every encoded audio frame pushed to the
// muxer. Records how far the muxed audio extends, then re-evaluates whether
// a blob should be gathered or more data extracted.
void MediaEncoder::OnEncodedAudioPushed(const RefPtr<EncodedFrame>& aFrame) {
  MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
  const auto audioEnd = aFrame->GetEndTime();
  mMuxedAudioEndTime = audioEnd;
  MaybeExtractOrGatherBlob();
}
// Encoder-thread callback fired for every encoded video frame pushed to the
// muxer. Records how far the muxed video extends, then re-evaluates whether
// a blob should be gathered or more data extracted.
void MediaEncoder::OnEncodedVideoPushed(const RefPtr<EncodedFrame>& aFrame) {
  MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
  const auto videoEnd = aFrame->GetEndTime();
  mMuxedVideoEndTime = videoEnd;
  MaybeExtractOrGatherBlob();
}
// Called on the encoder thread after each encoded frame is pushed to the
// muxer. Decides whether to (a) gather a complete blob and notify
// mDataAvailableEvent because at least mTimeslice of data has been muxed
// since the last blob, and/or (b) extract muxed data into the current blob
// storage so buffering is bounded to roughly one second.
void MediaEncoder::MaybeExtractOrGatherBlob() {
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
// The amount of data guaranteed to be fully muxed is bounded by whichever
// track ends the earliest. Tracks that don't exist are at +infinity.
TimeUnit muxedEndTime = std::min(mMuxedAudioEndTime, mMuxedVideoEndTime);
if ((muxedEndTime - mLastBlobTime).ToTimeDuration() >= mTimeslice) {
LOG(LogLevel::Verbose, ("MediaEncoder %p Muxed %.2fs of data since last "
"blob. Issuing new blob.",
this, (muxedEndTime - mLastBlobTime).ToSeconds()));
// RequestData() synchronously advances mLastBlobTime and mLastExtractTime
// to muxedEndTime before extracting, so the extract branch below will not
// also fire for this same frame.
RequestData()->Then(mEncoderThread, __func__,
[this, self = RefPtr<MediaEncoder>(this)](
const BlobPromise::ResolveOrRejectValue& aValue) {
if (aValue.IsReject()) {
SetError();
return;
}
RefPtr<BlobImpl> blob = aValue.ResolveValue();
mDataAvailableEvent.Notify(std::move(blob));
});
}
// NOTE: intentionally evaluated after the blob branch, since RequestData()
// may have just updated mLastExtractTime.
if (muxedEndTime - mLastExtractTime > TimeUnit::FromSeconds(1)) {
// Extract data from the muxer at least every second.
LOG(LogLevel::Verbose,
("MediaEncoder %p Muxed %.2fs of data since last "
"extract. Extracting more data into blob.",
this, (muxedEndTime - mLastExtractTime).ToSeconds()));
mLastExtractTime = muxedEndTime;
Unused << Extract();
}
}
// Pull encoded media data from MediaEncoder and put into MutableBlobStorage.
RefPtr<GenericPromise> MediaEncoder::Extract() {
MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
@ -1073,8 +1128,7 @@ void MediaEncoder::UpdateStarted() {
mStarted = true;
// Start issuing timeslice-based blobs.
MOZ_ASSERT(mLastBlobTimeStamp.IsNull());
mLastBlobTimeStamp = TimeStamp::Now();
MOZ_ASSERT(mLastBlobTime == TimeUnit::Zero());
nsresult rv = mEncoderThread->Dispatch(NS_NewRunnableFunction(
"mozilla::MediaEncoder::NotifyStarted", [ls = mListeners.Clone()] {

View file

@ -246,6 +246,12 @@ class MediaEncoder {
*/
RefPtr<BlobPromise> RequestData();
// Event that gets notified after we have muxed at least mTimeslice worth of
// data into the current blob storage. Notified from the encoder thread with
// the gathered blob; see MaybeExtractOrGatherBlob().
MediaEventSource<RefPtr<dom::BlobImpl>>& DataAvailableEvent() {
return mDataAvailableEvent;
}
protected:
~MediaEncoder();
@ -284,6 +290,24 @@ class MediaEncoder {
*/
void MaybeCreateMutableBlobStorage();
/**
* Called when an encoded audio frame has been pushed by the audio encoder.
*/
void OnEncodedAudioPushed(const RefPtr<EncodedFrame>& aFrame);
/**
* Called when an encoded video frame has been pushed by the video encoder.
*/
void OnEncodedVideoPushed(const RefPtr<EncodedFrame>& aFrame);
/**
* If enough data has been pushed to the muxer, extract it into the current
* blob storage. If more than mTimeslice data has been pushed to the muxer
* since the last DataAvailableEvent was notified, also gather the blob and
* notify MediaRecorder.
*/
void MaybeExtractOrGatherBlob();
// Extracts encoded and muxed data into the current blob storage, creating one
// if it doesn't exist. The returned promise resolves when data has been
// stored into the blob.
@ -324,9 +348,13 @@ class MediaEncoder {
private:
nsTArray<RefPtr<MediaEncoderListener>> mListeners;
MediaEventListener mAudioPushListener;
MediaEventListener mAudioFinishListener;
MediaEventListener mVideoPushListener;
MediaEventListener mVideoFinishListener;
MediaEventProducer<RefPtr<dom::BlobImpl>> mDataAvailableEvent;
// The AudioNode we are encoding.
// Will be null when input is media stream or destination node.
RefPtr<dom::AudioNode> mAudioNode;
@ -351,8 +379,20 @@ class MediaEncoder {
// If set, is a promise for the latest GatherBlob() operation. Allows
// GatherBlob() operations to be serialized in order to avoid races.
RefPtr<BlobPromise> mBlobPromise;
// Timestamp of the last fired dataavailable event.
TimeStamp mLastBlobTimeStamp;
// The end time of the muxed data in the last gathered blob. If more than one
// track is present, this is the end time of the track that ends the earliest
// in the last blob. Encoder thread only.
media::TimeUnit mLastBlobTime;
// The end time of the muxed data in the current blob storage. If more than
// one track is present, this is the end time of the track that ends the
// earliest in the current blob storage. Encoder thread only.
media::TimeUnit mLastExtractTime;
// The end time of encoded audio data sent to the muxer. Positive infinity if
// there is no audio encoder. Encoder thread only.
media::TimeUnit mMuxedAudioEndTime;
// The end time of encoded video data sent to the muxer. Positive infinity if
// there is no video encoder. Encoder thread only.
media::TimeUnit mMuxedVideoEndTime;
TimeStamp mStartTime;
bool mInitialized;