Bug 1743834 - Shut down the AudioSink when audio is muted. r=alwu,media-playback-reviewers

This does the following:
- When the media is muted, shut down and release the AudioSink;
- While the media is muted, use the system clock to advance the video;
- Each time the clock is queried, discard any audio packets that are now in
  the past compared to the current media time;
- While muted, if the audio has finished, resolve the EndedPromise;
- When the media is unmuted, create a new AudioSink and start using its clock
  again.

This works well and A/V sync is correct, but a slight micro-stutter is
perceptible in the video when looking carefully, because of the time it takes
to open the audio stream. This is fixed in subsequent patches.
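
In outline, the clock selection works as in the sketch below. The names and
types (ClockSelectionSketch, SinkStandIn, std::optional, std::chrono) are
simplified stand-ins rather than the Gecko ones (AudioSinkWrapper, AudioSink,
TimeUnit, TimeStamp), so this illustrates the approach rather than the actual
implementation:

#include <chrono>
#include <optional>

// Stand-in for the real AudioSink: only its clock matters for this sketch.
struct SinkStandIn {
  double GetPosition() const { return 0.0; }
};

class ClockSelectionSketch {
  using Clock = std::chrono::steady_clock;

 public:
  void SetVolume(double aVolume) {
    if (aVolume == 0.0 && mVolume != 0.0) {
      // Muting: remember where the audio clock was, release the sink, and
      // fall back to the system clock from that point on.
      if (mSink) {
        mPlayDuration = mSink->GetPosition();
        mPlayStartTime = Clock::now();
        mSink.reset();
      }
    } else if (aVolume != 0.0 && mVolume == 0.0) {
      // Unmuting: re-create a sink; its clock takes over again. (The real
      // code also starts the new sink at the current position and drops
      // stale audio packets first.)
      mSink.emplace();
    }
    mVolume = aVolume;
  }

  // The position comes from the audio sink when there is one, and is
  // otherwise extrapolated from the system clock.
  double GetPositionSeconds() const {
    if (mSink) {
      return mSink->GetPosition();
    }
    std::chrono::duration<double> elapsed = Clock::now() - mPlayStartTime;
    return mPlayDuration + elapsed.count() * mPlaybackRate;
  }

 private:
  std::optional<SinkStandIn> mSink{SinkStandIn{}};
  Clock::time_point mPlayStartTime = Clock::now();
  double mPlayDuration = 0.0;
  double mVolume = 1.0;
  double mPlaybackRate = 1.0;
};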

Differential Revision: https://phabricator.services.mozilla.com/D136234
Paul Adenot 2022-04-20 12:07:09 +00:00
Parent c06f4b14cf
Commit 5112586411
3 changed files with 112 additions and 13 deletions


@@ -140,7 +140,7 @@ void AudioSink::ReenqueueUnplayedAudioDataIfNeeded() {
AlignedAudioBuffer queuedAudio(sampleCount);
DebugOnly<int> samplesRead =
mProcessedSPSCQueue->Dequeue(queuedAudio.Data(), sampleCount);
MOZ_ASSERT(samplesRead == sampleCount);
// Extrapolate mOffset, mTime from the front of the queue
@@ -149,10 +149,15 @@ void AudioSink::ReenqueueUnplayedAudioDataIfNeeded() {
// For `mTime`, assume there hasn't been a discontinuity recently.
RefPtr<AudioData> frontPacket = mAudioQueue.PeekFront();
RefPtr<AudioData> data =
new AudioData(frontPacket->mOffset, frontPacket->mTime - duration,
std::move(queuedAudio), channelCount, rate);
MOZ_DIAGNOSTIC_ASSERT(duration == data->mDuration, "must be equal");
SINK_LOG(
"Muting: Pushing back %u frames (%lfms) from the ring buffer back into "
"the audio queue",
frameCount, static_cast<float>(frameCount) / rate);
mAudioQueue.PushFront(data);
}


@@ -45,10 +45,18 @@ TimeUnit AudioSinkWrapper::GetEndTime(TrackType aType) const {
if (aType == TrackInfo::kAudioTrack && mAudioSink) {
return mAudioSink->GetEndTime();
}
if (aType == TrackInfo::kAudioTrack && !mAudioSink && IsMuted()) {
if (IsPlaying()) {
return GetSystemClockPosition(TimeStamp::Now());
}
return mPlayDuration;
}
return TimeUnit::Zero();
}
TimeUnit AudioSinkWrapper::GetVideoPosition(TimeStamp aNow) const {
TimeUnit AudioSinkWrapper::GetSystemClockPosition(TimeStamp aNow) const {
AssertOwnerThread();
MOZ_ASSERT(!mPlayStartTime.IsNull());
// Time elapsed since we started playing.
@@ -57,6 +65,11 @@ TimeUnit AudioSinkWrapper::GetVideoPosition(TimeStamp aNow) const {
return mPlayDuration + TimeUnit::FromSeconds(delta * mParams.mPlaybackRate);
}
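// Worked example with hypothetical numbers (not part of this patch),
// illustrating the formula above: if mPlayDuration was 3 s when the system
// clock took over, 2 s of wall-clock time have elapsed since mPlayStartTime,
// and the playback rate is 1.5, the reported position is 3 + 2 * 1.5 = 6 s.
double examplePlayDuration = 3.0;  // media position when the sink was released
double exampleElapsed = 2.0;       // wall-clock seconds since mPlayStartTime
double examplePlaybackRate = 1.5;
double examplePosition =
    examplePlayDuration + exampleElapsed * examplePlaybackRate;  // 6.0 s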
bool AudioSinkWrapper::IsMuted() const {
AssertOwnerThread();
return mParams.mVolume == 0.0;
}
TimeUnit AudioSinkWrapper::GetPosition(TimeStamp* aTimeStamp) {
AssertOwnerThread();
MOZ_ASSERT(mIsStarted, "Must be called after playback starts.");
@@ -64,16 +77,30 @@ TimeUnit AudioSinkWrapper::GetPosition(TimeStamp* aTimeStamp) {
TimeUnit pos;
TimeStamp t = TimeStamp::Now();
if (!mAudioEnded) {
if (!mAudioEnded && !IsMuted()) {
MOZ_ASSERT(mAudioSink);
// Rely on the audio sink to report playback position when it is not ended.
pos = mAudioSink->GetPosition();
LOGV("%p: Getting position from the Audio Sink %lf", this, pos.ToSeconds());
} else if (!mPlayStartTime.IsNull()) {
// Calculate playback position using system clock if we are still playing.
pos = GetVideoPosition(t);
// Calculate playback position using system clock if we are still playing,
// but not rendering the audio, because this audio sink is muted.
pos = GetSystemClockPosition(t);
LOGV("%p: Getting position from the system clock %lf", this,
pos.ToSeconds());
if (mAudioQueue.GetSize() > 0 && IsMuted()) {
// The media has an audio track, but it is muted and won't be dequeued;
// discard packets that are behind the current media time, to keep the queue
// size under control.
DropAudioPacketsIfNeeded(pos);
// If muted, it's necessary to manually check if the audio has "ended",
// meaning that all the audio packets have been consumed, to resolve the
// ended promise.
if (CheckIfEnded()) {
MOZ_ASSERT(!mAudioSink);
mEndedPromiseHolder.Resolve(true, __func__);
}
}
} else {
// Return how long we've played if we are not playing.
pos = mPlayDuration;
@@ -87,6 +114,10 @@ TimeUnit AudioSinkWrapper::GetPosition(TimeStamp* aTimeStamp) {
return pos;
}
bool AudioSinkWrapper::CheckIfEnded() const {
return mAudioQueue.IsFinished() && mAudioQueue.GetSize() == 0u;
}
bool AudioSinkWrapper::HasUnplayedFrames(TrackType aType) const {
AssertOwnerThread();
return mAudioSink ? mAudioSink->HasUnplayedFrames() : false;
@@ -97,8 +128,64 @@ media::TimeUnit AudioSinkWrapper::UnplayedDuration(TrackType aType) const {
return mAudioSink ? mAudioSink->UnplayedDuration() : media::TimeUnit::Zero();
}
void AudioSinkWrapper::DropAudioPacketsIfNeeded(
const TimeUnit& aMediaPosition) {
MOZ_ASSERT(!mAudioSink);
RefPtr<AudioData> audio = mAudioQueue.PeekFront();
uint32_t dropped = 0;
while (audio && audio->mTime + audio->mDuration < aMediaPosition) {
// drop this packet, try the next one
audio = mAudioQueue.PopFront();
dropped++;
if (audio) {
LOG("Dropping audio packets: media position: %lf, "
"packet dropped: [%lf, %lf] (%u so far).\n",
aMediaPosition.ToSeconds(), audio->mTime.ToSeconds(),
(audio->mTime + audio->mDuration).ToSeconds(), dropped);
}
audio = mAudioQueue.PeekFront();
}
}
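// Standalone illustration (hypothetical Packet type, not Gecko code) of the
// dropping condition used above: a packet is only discarded when it ends
// strictly before the current media position; a packet that straddles the
// position is kept.
#include <cstdio>
#include <deque>

struct Packet {
  double start;     // seconds
  double duration;  // seconds
};

int main() {
  std::deque<Packet> queue{{0.0, 0.2}, {0.2, 0.2}, {0.4, 0.2}};
  const double mediaPosition = 0.35;
  unsigned dropped = 0;
  while (!queue.empty() &&
         queue.front().start + queue.front().duration < mediaPosition) {
    queue.pop_front();  // only the packet covering [0.0, 0.2] is behind 0.35
    dropped++;
  }
  std::printf("%u dropped, %zu left\n", dropped, queue.size());  // 1, 2
  return 0;
}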
void AudioSinkWrapper::OnMuted(bool aMuted) {
AssertOwnerThread();
if (aMuted) {
if (mAudioSink) {
LOG("AudioSinkWrapper muted, shutting down AudioStream.");
mAudioSinkEndedPromise.DisconnectIfExists();
mPlayDuration = mAudioSink->GetPosition();
mPlayStartTime = TimeStamp::Now();
Maybe<MozPromiseHolder<MediaSink::EndedPromise>> rv =
mAudioSink->Shutdown(ShutdownCause::Muting);
MOZ_ASSERT(rv.isSome());
mEndedPromiseHolder = std::move(rv.ref());
mAudioSink = nullptr;
}
} else {
if (!IsPlaying()) {
return;
}
LOG("AudioSinkWrapper unmuted, re-creating an AudioStream.");
TimeUnit mediaPosition = GetSystemClockPosition(TimeStamp::Now());
DropAudioPacketsIfNeeded(mediaPosition);
nsresult rv = StartAudioSink(mediaPosition);
if (NS_FAILED(rv)) {
NS_WARNING(
"Could not start AudioSink from AudioSinkWrapper when unmuting");
}
}
}
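// Illustration of the "ended" hand-off pattern used above, with std::promise
// standing in for MozPromiseHolder (hypothetical types, not Gecko code): when
// the sink is shut down because of muting, it returns its still-pending
// completion handle, and the wrapper resolves it later, once the remaining
// (silent) audio data has been consumed.
#include <future>
#include <optional>
#include <utility>

struct MutedSinkStandIn {
  // Shutting down for muting hands the pending "ended" promise back.
  std::optional<std::promise<bool>> ShutdownForMuting() {
    return std::move(mEnded);
  }
  std::optional<std::promise<bool>> mEnded{std::promise<bool>{}};
};

int main() {
  MutedSinkStandIn sink;
  std::future<bool> ended = sink.mEnded->get_future();
  // Muting: the wrapper takes ownership of the unresolved promise.
  std::optional<std::promise<bool>> taken = sink.ShutdownForMuting();
  // Later, when the audio queue is finished and empty, the wrapper resolves.
  taken->set_value(true);
  return ended.get() ? 0 : 1;
}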
void AudioSinkWrapper::SetVolume(double aVolume) {
AssertOwnerThread();
if (aVolume == 0. && mParams.mVolume != 0.) {
OnMuted(true);
} else if (aVolume != 0. && mParams.mVolume == 0.) {
OnMuted(false);
}
mParams.mVolume = aVolume;
if (mAudioSink) {
mAudioSink->SetVolume(aVolume);
@@ -114,22 +201,22 @@ void AudioSinkWrapper::SetStreamName(const nsAString& aStreamName) {
void AudioSinkWrapper::SetPlaybackRate(double aPlaybackRate) {
AssertOwnerThread();
if (!mAudioEnded) {
if (!mAudioEnded && mAudioSink) {
// Pass the playback rate to the audio sink. The underlying AudioStream
// will handle playback rate changes and report correct audio position.
mAudioSink->SetPlaybackRate(aPlaybackRate);
} else if (!mPlayStartTime.IsNull()) {
// Adjust playback duration and start time when we are still playing.
TimeStamp now = TimeStamp::Now();
mPlayDuration = GetVideoPosition(now);
mPlayDuration = GetSystemClockPosition(now);
mPlayStartTime = now;
}
// mParams.mPlaybackRate affects GetVideoPosition(). It should be updated
// after the calls to GetVideoPosition();
// mParams.mPlaybackRate affects GetSystemClockPosition(). It should be
// updated after the calls to GetSystemClockPosition();
mParams.mPlaybackRate = aPlaybackRate;
// Do nothing when not playing. Changes in playback rate will be taken into
// account by GetVideoPosition().
// account by GetSystemClockPosition().
}
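// Why the order matters (hypothetical numbers, not part of this patch):
// suppose playback started at position 0 with rate 1.0 and the rate changes
// to 2.0 after 4 s of wall-clock time, then the position is queried 2 s
// later. Rebasing mPlayDuration/mPlayStartTime before updating the rate
// scales only the time spent at the new rate.
double exampleRebased = (0.0 + 4.0 * 1.0) + 2.0 * 2.0;  // 8 s, correct
double exampleNaive = (4.0 + 2.0) * 2.0;                // 12 s, wrong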
void AudioSinkWrapper::SetPreservesPitch(bool aPreservesPitch) {


@@ -66,6 +66,7 @@ class AudioSinkWrapper : public MediaSink {
media::TimeUnit GetPosition(TimeStamp* aTimeStamp = nullptr) override;
bool HasUnplayedFrames(TrackType aType) const override;
media::TimeUnit UnplayedDuration(TrackType aType) const override;
void DropAudioPacketsIfNeeded(const media::TimeUnit& aMediaPosition);
void SetVolume(double aVolume) override;
void SetStreamName(const nsAString& aStreamName) override;
@@ -86,6 +87,8 @@ class AudioSinkWrapper : public MediaSink {
void GetDebugInfo(dom::MediaSinkDebugInfo& aInfo) override;
private:
bool IsMuted() const;
void OnMuted(bool aMuted);
virtual ~AudioSinkWrapper();
void AssertOwnerThread() const {
@@ -94,7 +97,11 @@ class AudioSinkWrapper : public MediaSink {
nsresult StartAudioSink(const media::TimeUnit& aStartTime);
media::TimeUnit GetVideoPosition(TimeStamp aNow) const;
// Get the current media position using the system clock. This is used when
// the audio is muted, or when the media has no audio track. Otherwise, the
// media's position is based on the clock of the AudioStream.
media::TimeUnit GetSystemClockPosition(TimeStamp aNow) const;
bool CheckIfEnded() const;
void OnAudioEnded();