Bug 1529581 - Make VideoFrameConverter serve frames through a MediaEventSource. r=bwc

Differential Revision: https://phabricator.services.mozilla.com/D129660
Andreas Pehrson 2021-11-03 15:23:30 +00:00
Parent 169ec781bd
Commit b3cf495d68
7 changed files with 59 additions and 117 deletions
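At a glance, the patch replaces the converter's hand-rolled listener registry (AddListener/RemoveListener on a VideoConverterListener interface) with a MediaEventSourceExc that consumers connect to directly. A minimal before/after sketch of the consumer side, assuming the converter API from the diff below; `MyConsumer` and `mTarget` are illustrative, not from the patch:

// Before: a refcounted callback interface, registered with the converter.
class MyConsumer final : public VideoConverterListener {
 public:
  void OnVideoFrameConverted(const webrtc::VideoFrame& aFrame) override {
    // forward the frame
  }
};
// converter->AddListener(consumer);
// ...
// converter->RemoveListener(consumer);  // explicit, dispatched teardown

// After: connect to the converter's event stream; the returned
// MediaEventListener is the consumer's handle for tearing the connection down.
MediaEventListener listener =
    converter->VideoFrameConvertedEvent().Connect(
        mTarget,  // thread/queue to be notified on (illustrative)
        [](webrtc::VideoFrame aFrame) { /* forward the frame */ });
// ...
listener.DisconnectIfExists();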

View file

@@ -34,22 +34,12 @@ namespace mozilla {
static mozilla::LazyLogModule gVideoFrameConverterLog("VideoFrameConverter");
class VideoConverterListener {
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VideoConverterListener)
virtual void OnVideoFrameConverted(const webrtc::VideoFrame& aVideoFrame) = 0;
protected:
virtual ~VideoConverterListener() = default;
};
// An async video frame format converter.
//
// Input is typically a MediaTrackListener driven by MediaTrackGraph.
//
// Output is passed through to all added VideoConverterListeners on a TaskQueue
// thread whenever a frame is converted.
// Output is passed through to VideoFrameConvertedEvent() whenever a frame is
// converted.
class VideoFrameConverter {
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VideoFrameConverter)
@@ -145,34 +135,20 @@ class VideoFrameConverter {
})));
}
void AddListener(const RefPtr<VideoConverterListener>& aListener) {
MOZ_ALWAYS_SUCCEEDS(mTaskQueue->Dispatch(NS_NewRunnableFunction(
"VideoFrameConverter::AddListener",
[self = RefPtr<VideoFrameConverter>(this), this, aListener] {
MOZ_ASSERT(!mListeners.Contains(aListener));
mListeners.AppendElement(aListener);
})));
}
void RemoveListener(const RefPtr<VideoConverterListener>& aListener) {
MOZ_ALWAYS_SUCCEEDS(mTaskQueue->Dispatch(NS_NewRunnableFunction(
"VideoFrameConverter::RemoveListener",
[self = RefPtr<VideoFrameConverter>(this), this, aListener] {
mListeners.RemoveElement(aListener);
})));
}
void Shutdown() {
mPacer->Shutdown()->Then(mTaskQueue, __func__,
[self = RefPtr<VideoFrameConverter>(this), this] {
mPacingListener.DisconnectIfExists();
mListeners.Clear();
mBufferPool.Release();
mLastFrameQueuedForProcessing = FrameToProcess();
mLastFrameConverted = Nothing();
});
}
MediaEventSourceExc<webrtc::VideoFrame>& VideoFrameConvertedEvent() {
return mVideoFrameConvertedEvent;
}
protected:
struct FrameToProcess {
FrameToProcess() = default;
@@ -214,8 +190,7 @@ class VideoFrameConverter {
MOZ_COUNTED_DTOR_VIRTUAL(VideoFrameConverter)
void VideoFrameConverted(const webrtc::VideoFrame& aVideoFrame,
int32_t aSerial) {
void VideoFrameConverted(webrtc::VideoFrame aVideoFrame, int32_t aSerial) {
MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());
MOZ_LOG(
@@ -235,9 +210,7 @@ class VideoFrameConverter {
mLastFrameConverted = Some(FrameConverted(aVideoFrame, aSerial));
for (RefPtr<VideoConverterListener>& listener : mListeners) {
listener->OnVideoFrameConverted(aVideoFrame);
}
mVideoFrameConvertedEvent.Notify(std::move(aVideoFrame));
}
void QueueForProcessing(RefPtr<layers::Image> aImage, TimeStamp aTime,
@@ -328,7 +301,7 @@ class VideoFrameConverter {
// This is the same input frame as last time. Avoid a conversion.
webrtc::VideoFrame frame = mLastFrameConverted->mFrame;
frame.set_timestamp_us(time.us());
VideoFrameConverted(frame, mLastFrameConverted->mSerial);
VideoFrameConverted(std::move(frame), mLastFrameConverted->mSerial);
return;
}
@@ -423,13 +396,17 @@ class VideoFrameConverter {
aFrame.Serial());
}
public:
const dom::RTCStatsTimestampMaker mTimestampMaker;
const RefPtr<TaskQueue> mTaskQueue;
protected:
// Used to pace future frames close to their rendering-time. Thread-safe.
const RefPtr<Pacer<FrameToProcess>> mPacer;
MediaEventProducerExc<webrtc::VideoFrame> mVideoFrameConvertedEvent;
// Accessed only from mTaskQueue.
MediaEventListener mPacingListener;
webrtc::I420BufferPool mBufferPool;
@@ -440,7 +417,6 @@ class VideoFrameConverter {
#ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
size_t mFramesDropped = 0;
#endif
nsTArray<RefPtr<VideoConverterListener>> mListeners;
};
} // namespace mozilla
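A note on the `Exc` flavor introduced above: as I read Mozilla's MediaEventSource.h, MediaEventProducerExc/MediaEventSourceExc is the exclusive variant that admits exactly one listener and moves the payload to it, which is what lets VideoFrameConverted() take the frame by value and Notify with std::move instead of copying. Anyone needing fan-out has to re-broadcast, which is what the gtest in the next file does. A condensed sketch of that pattern; names other than VideoFrameConvertedEvent() are illustrative:

// Re-broadcasting the exclusive event to a copy-based, multi-listener
// producer (assumed Exc semantics: single listener, moved payload).
MediaEventProducer<webrtc::VideoFrame, TimeStamp> gFanOut;

MediaEventListener ConnectFanOut(VideoFrameConverter* aConverter) {
  // Take the single listener slot on the Exc source...
  return aConverter->VideoFrameConvertedEvent().Connect(
      AbstractThread::GetCurrent(), [](webrtc::VideoFrame aFrame) {
        // ...and fan out through a plain producer, which copies per listener.
        gFanOut.Notify(std::move(aFrame), TimeStamp::Now());
      });
}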

View file

@@ -16,10 +16,17 @@ using namespace mozilla;
class VideoFrameConverterTest;
class FrameListener : public VideoConverterListener {
class FrameListener {
public:
void OnVideoFrameConverted(const webrtc::VideoFrame& aVideoFrame) override {
mVideoFrameConvertedEvent.Notify(aVideoFrame, TimeStamp::Now());
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(FrameListener)
explicit FrameListener(MediaEventSourceExc<webrtc::VideoFrame>& aSource) {
mListener = aSource.Connect(AbstractThread::GetCurrent(), this,
&FrameListener::OnVideoFrameConverted);
}
void OnVideoFrameConverted(webrtc::VideoFrame aVideoFrame) {
mVideoFrameConvertedEvent.Notify(std::move(aVideoFrame), TimeStamp::Now());
}
MediaEventSource<webrtc::VideoFrame, TimeStamp>& VideoFrameConvertedEvent() {
@@ -27,6 +34,9 @@ class FrameListener : public VideoConverterListener {
}
private:
~FrameListener() { mListener.Disconnect(); }
MediaEventListener mListener;
MediaEventProducer<webrtc::VideoFrame, TimeStamp> mVideoFrameConvertedEvent;
};
@@ -35,7 +45,6 @@ class DebugVideoFrameConverter : public VideoFrameConverter {
explicit DebugVideoFrameConverter(
const dom::RTCStatsTimestampMaker& aTimestampMaker)
: VideoFrameConverter(aTimestampMaker) {}
using VideoFrameConverter::mTaskQueue;
using VideoFrameConverter::QueueForProcessing;
};
@@ -48,9 +57,8 @@ class VideoFrameConverterTest : public ::testing::Test {
VideoFrameConverterTest()
: mTimestampMaker(dom::RTCStatsTimestampMaker()),
mConverter(MakeAndAddRef<DebugVideoFrameConverter>(mTimestampMaker)),
mListener(MakeAndAddRef<FrameListener>()) {
mConverter->AddListener(mListener);
}
mListener(MakeAndAddRef<FrameListener>(
mConverter->VideoFrameConvertedEvent())) {}
void TearDown() override { mConverter->Shutdown(); }

View file

@@ -368,8 +368,7 @@ class VideoSessionConduit : public MediaSessionConduit {
* be invoked. This ensures the inserted video-frames can be
* transmitted by the conduit.
*/
virtual MediaConduitErrorCode SendVideoFrame(
const webrtc::VideoFrame& frame) = 0;
virtual MediaConduitErrorCode SendVideoFrame(webrtc::VideoFrame aFrame) = 0;
/**
* These methods allow unit tests to double-check that the
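The switch from `const webrtc::VideoFrame&` to pass-by-value here is the sink-style idiom: webrtc::VideoFrame holds its pixels behind a refcounted VideoFrameBuffer, so a copy is shallow anyway, and taking the frame by value lets every stage forward it with std::move rather than pinning a reference across threads. A toy sketch of the idiom (mine, not from the patch):

// By-value sink: the caller picks copy vs. move at the call site, and the
// sink forwards with one more move; the pixel buffer is never deep-copied.
struct FrameSink {
  void Send(webrtc::VideoFrame aFrame) {
    ForwardToEncoder(std::move(aFrame));  // steals the buffer refptr
  }
  void ForwardToEncoder(webrtc::VideoFrame aFrame);
};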

View file

@@ -1322,7 +1322,7 @@ void WebrtcVideoConduit::RemoveSink(
}
MediaConduitErrorCode WebrtcVideoConduit::SendVideoFrame(
const webrtc::VideoFrame& frame) {
webrtc::VideoFrame aFrame) {
// XXX Google uses a "timestamp_aligner" to translate timestamps from the
// camera via TranslateTimestamp(); we should look at doing the same. This
// avoids sampling error when capturing frames, but google had to deal with
@@ -1349,18 +1349,18 @@ MediaConduitErrorCode WebrtcVideoConduit::SendVideoFrame(
mSendStreamConfig.rtp.ssrcs.front());
bool updateSendResolution = mUpdateSendResolution.exchange(false);
if (updateSendResolution || frame.width() != mLastWidth ||
frame.height() != mLastHeight) {
if (updateSendResolution || aFrame.width() != mLastWidth ||
aFrame.height() != mLastHeight) {
// See if we need to recalculate what we're sending.
CSFLogVerbose(LOGTAG, "%s: call SelectSendResolution with %ux%u",
__FUNCTION__, frame.width(), frame.height());
MOZ_ASSERT(frame.width() != 0 && frame.height() != 0);
__FUNCTION__, aFrame.width(), aFrame.height());
MOZ_ASSERT(aFrame.width() != 0 && aFrame.height() != 0);
// Note coverity will flag this since it thinks they can be 0
MOZ_ASSERT(mCurSendCodecConfig);
mLastWidth = frame.width();
mLastHeight = frame.height();
SelectSendResolution(frame.width(), frame.height());
mLastWidth = aFrame.width();
mLastHeight = aFrame.height();
SelectSendResolution(aFrame.width(), aFrame.height());
}
// adapt input video to wants of sink
@@ -1369,8 +1369,8 @@ MediaConduitErrorCode WebrtcVideoConduit::SendVideoFrame(
}
if (!mVideoAdapter->AdaptFrameResolution(
frame.width(), frame.height(),
frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec, &cropWidth,
aFrame.width(), aFrame.height(),
aFrame.timestamp_us() * rtc::kNumNanosecsPerMicrosec, &cropWidth,
&cropHeight, &adaptedWidth, &adaptedHeight)) {
// VideoAdapter dropped the frame.
return kMediaConduitNoError;
@@ -1392,13 +1392,13 @@ MediaConduitErrorCode WebrtcVideoConduit::SendVideoFrame(
return kMediaConduitNoError;
}
int cropX = (frame.width() - cropWidth) / 2;
int cropY = (frame.height() - cropHeight) / 2;
int cropX = (aFrame.width() - cropWidth) / 2;
int cropY = (aFrame.height() - cropHeight) / 2;
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer;
if (adaptedWidth == frame.width() && adaptedHeight == frame.height()) {
if (adaptedWidth == aFrame.width() && adaptedHeight == aFrame.height()) {
// No adaption - optimized path.
buffer = frame.video_frame_buffer();
buffer = aFrame.video_frame_buffer();
} else {
// Adapted I420 frame.
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer =
@@ -1407,19 +1407,19 @@ MediaConduitErrorCode WebrtcVideoConduit::SendVideoFrame(
CSFLogWarn(LOGTAG, "Creating a buffer for scaling failed, pool is empty");
return kMediaConduitNoError;
}
i420Buffer->CropAndScaleFrom(*frame.video_frame_buffer()->GetI420(), cropX,
i420Buffer->CropAndScaleFrom(*aFrame.video_frame_buffer()->GetI420(), cropX,
cropY, cropWidth, cropHeight);
buffer = i420Buffer;
}
MOZ_ASSERT(!frame.color_space(), "Unexpected use of color space");
MOZ_ASSERT(!frame.has_update_rect(), "Unexpected use of update rect");
MOZ_ASSERT(!aFrame.color_space(), "Unexpected use of color space");
MOZ_ASSERT(!aFrame.has_update_rect(), "Unexpected use of update rect");
mVideoBroadcaster.OnFrame(webrtc::VideoFrame::Builder()
.set_video_frame_buffer(buffer)
.set_timestamp_us(frame.timestamp_us())
.set_timestamp_rtp(frame.timestamp())
.set_rotation(frame.rotation())
.set_timestamp_us(aFrame.timestamp_us())
.set_timestamp_rtp(aFrame.timestamp())
.set_rotation(aFrame.rotation())
.build());
return kMediaConduitNoError;
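For intuition on the centering math above, a worked example with hypothetical numbers (mine, not from the patch):

// Suppose a 1280x720 input that the adapter wants cropped to 4:3:
int width = 1280, height = 720;
int cropWidth = 960, cropHeight = 720;  // as reported by AdaptFrameResolution
int cropX = (width - cropWidth) / 2;    // (1280 - 960) / 2 = 160
int cropY = (height - cropHeight) / 2;  // (720 - 720) / 2  = 0
// CropAndScaleFrom then reads the 960x720 window at (160, 0) and scales it
// to the adapted size, e.g. 640x480.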

View file

@@ -105,8 +105,7 @@ class WebrtcVideoConduit
* be invoked. This ensures the inserted video-frames can be
* transmitted by the conduit.
*/
MediaConduitErrorCode SendVideoFrame(
const webrtc::VideoFrame& frame) override;
MediaConduitErrorCode SendVideoFrame(webrtc::VideoFrame aFrame) override;
bool SendRtp(const uint8_t* aData, size_t aLength,
const webrtc::PacketOptions& aOptions) override;

View file

@@ -732,10 +732,10 @@ class MediaPipelineTransmit::PipelineListener
mConverter = std::move(aConverter);
}
void OnVideoFrameConverted(const webrtc::VideoFrame& aVideoFrame) {
void OnVideoFrameConverted(webrtc::VideoFrame aVideoFrame) {
MOZ_RELEASE_ASSERT(mConduit->type() == MediaSessionConduit::VIDEO);
static_cast<VideoSessionConduit*>(mConduit.get())
->SendVideoFrame(aVideoFrame);
->SendVideoFrame(std::move(aVideoFrame));
}
// Implement MediaTrackListener
@@ -767,42 +767,6 @@ class MediaPipelineTransmit::PipelineListener
bool mDirectConnect;
};
// Implements VideoConverterListener for MediaPipeline.
//
// We pass converted frames on to MediaPipelineTransmit::PipelineListener
// where they are further forwarded to VideoConduit.
// MediaPipelineTransmit calls Detach() during shutdown to ensure there are
// no cyclic dependencies between us and PipelineListener.
class MediaPipelineTransmit::VideoFrameFeeder : public VideoConverterListener {
public:
explicit VideoFrameFeeder(RefPtr<PipelineListener> aListener)
: mMutex("VideoFrameFeeder"), mListener(std::move(aListener)) {
MOZ_COUNT_CTOR(VideoFrameFeeder);
}
void Detach() {
MutexAutoLock lock(mMutex);
mListener = nullptr;
}
void OnVideoFrameConverted(const webrtc::VideoFrame& aVideoFrame) override {
MutexAutoLock lock(mMutex);
if (!mListener) {
return;
}
mListener->OnVideoFrameConverted(aVideoFrame);
}
protected:
MOZ_COUNTED_DTOR_OVERRIDE(VideoFrameFeeder)
Mutex mMutex; // Protects the member below.
RefPtr<PipelineListener> mListener;
};
MediaPipelineTransmit::MediaPipelineTransmit(
const std::string& aPc, RefPtr<MediaTransportHandler> aTransportHandler,
RefPtr<nsISerialEventTarget> aMainThread,
@@ -814,12 +778,6 @@ MediaPipelineTransmit::MediaPipelineTransmit(
mWatchManager(this, AbstractThread::MainThread()),
mIsVideo(aIsVideo),
mListener(new PipelineListener(mConduit)),
mFeeder(aIsVideo ? MakeAndAddRef<VideoFrameFeeder>(mListener)
: nullptr), // For video we send frames to an
// async VideoFrameConverter that
// calls back to a VideoFrameFeeder
// that feeds I420 frames to
// VideoConduit.
mDomTrack(nullptr, "MediaPipelineTransmit::mDomTrack"),
mSendTrackOverride(nullptr, "MediaPipelineTransmit::mSendTrackOverride") {
if (!IsVideo()) {
@@ -828,7 +786,11 @@ MediaPipelineTransmit::MediaPipelineTransmit(
mListener->SetAudioProxy(mAudioProcessing);
} else { // Video
mConverter = MakeAndAddRef<VideoFrameConverter>(GetTimestampMaker());
mConverter->AddListener(mFeeder);
mFrameListener = mConverter->VideoFrameConvertedEvent().Connect(
mConverter->mTaskQueue,
[listener = mListener](webrtc::VideoFrame aFrame) {
listener->OnVideoFrameConverted(std::move(aFrame));
});
mListener->SetVideoFrameConverter(mConverter);
}
@@ -841,9 +803,7 @@ MediaPipelineTransmit::MediaPipelineTransmit(
}
MediaPipelineTransmit::~MediaPipelineTransmit() {
if (mFeeder) {
mFeeder->Detach();
}
mFrameListener.DisconnectIfExists();
MOZ_ASSERT(!mTransmitting);
MOZ_ASSERT(!mDomTrack.Ref());
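The deleted VideoFrameFeeder existed only to break the converter-to-pipeline cycle with a mutex-guarded Detach(). The MediaEventListener returned by Connect() now carries that lifetime duty: my reading of MediaEventSource is that once DisconnectIfExists() returns, the connected lambda will no longer be invoked, so the captured RefPtr<PipelineListener> is released without any explicit locking. Condensed from the patch, with that assumption spelled out as a comment:

MediaPipelineTransmit::~MediaPipelineTransmit() {
  // Revokes the connection; the lambda holding RefPtr<PipelineListener>
  // stops firing (assumed MediaEventListener semantics, not stated in
  // the diff itself).
  mFrameListener.DisconnectIfExists();
  MOZ_ASSERT(!mTransmitting);
  MOZ_ASSERT(!mDomTrack.Ref());
}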

View file

@@ -309,9 +309,9 @@ class MediaPipelineTransmit : public MediaPipeline {
WatchManager<MediaPipelineTransmit> mWatchManager;
const bool mIsVideo;
const RefPtr<PipelineListener> mListener;
const RefPtr<VideoFrameFeeder> mFeeder;
RefPtr<AudioProxyThread> mAudioProcessing;
RefPtr<VideoFrameConverter> mConverter;
MediaEventListener mFrameListener;
Watchable<RefPtr<dom::MediaStreamTrack>> mDomTrack;
// Input port connecting mDomTrack's MediaTrack to mSendTrack.
RefPtr<MediaInputPort> mSendPort;