Bug 1299515 - Use an I420BufferPool for allocations in VideoFrameConverter. r=dminor

MozReview-Commit-ID: 50evWtMNjHz

--HG--
extra : rebase_source : 3ac599a252f65fb37b953b7eb920215006a24f2c
Andreas Pehrson 2017-11-28 11:42:33 +01:00
Parent 66a566c5e7
Commit 678079da9d
5 changed files with 115 additions and 221 deletions

View file

@@ -102,15 +102,17 @@ public:
                            unsigned short height,
                            uint64_t capture_time_ms)
   {
-    unsigned int yplane_length = width*height;
-    unsigned int cbcrplane_length = (width*height + 1)/2;
-    unsigned int video_length = yplane_length + cbcrplane_length;
-    uint8_t* buffer = new uint8_t[video_length];
-    memset(buffer, 0x10, yplane_length);
-    memset(buffer + yplane_length, 0x80, cbcrplane_length);
-    return mVideoConduit->SendVideoFrame(buffer, video_length, width, height,
-                                         VideoType::kVideoI420,
-                                         capture_time_ms);
+    rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+      webrtc::I420Buffer::Create(width, height);
+    memset(buffer->MutableDataY(), 0x10, buffer->StrideY() * buffer->height());
+    memset(buffer->MutableDataU(), 0x80, buffer->StrideU() * ((buffer->height() + 1) / 2));
+    memset(buffer->MutableDataV(), 0x80, buffer->StrideV() * ((buffer->height() + 1) / 2));
+
+    webrtc::VideoFrame frame(buffer,
+                             capture_time_ms,
+                             capture_time_ms,
+                             webrtc::kVideoRotation_0);
+    return mVideoConduit->SendVideoFrame(frame);
   }

   MockCall* mCall;
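The 0x10/0x80 fill values above are the limited-range YCbCr encoding of black: Y = 16, Cb = Cr = 128. A minimal standalone sketch of the same fill for a contiguous I420 layout (illustrative only; the helper below is not part of this patch):

#include <cstdint>
#include <cstring>
#include <vector>

// Fill a contiguous I420 frame with limited-range black.
// Y plane: width*height bytes at 0x10 (luma black).
// U and V planes: ((width+1)/2) * ((height+1)/2) bytes each at 0x80
// (neutral chroma).
std::vector<uint8_t> MakeBlackI420(int width, int height)
{
  const size_t ySize = static_cast<size_t>(width) * height;
  const size_t cSize =
    static_cast<size_t>((width + 1) / 2) * ((height + 1) / 2);
  std::vector<uint8_t> frame(ySize + 2 * cSize);
  memset(frame.data(), 0x10, ySize);             // Y
  memset(frame.data() + ySize, 0x80, 2 * cSize); // U then V
  return frame;
}

The test code above instead memsets each plane through the buffer's stride, since a webrtc::I420Buffer's stride can be wider than the visible width.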

View file

@@ -375,22 +375,13 @@ public:
   bool SetRemoteSSRC(unsigned int ssrc) override = 0;

   /**
-   * Function to deliver a capture video frame for encoding and transport
-   * @param video_frame: pointer to captured video-frame.
-   * @param video_frame_length: size of the frame
-   * @param width, height: dimensions of the frame
-   * @param video_type: Type of the video frame - I420, RAW
-   * @param captured_time: timestamp when the frame was captured.
-   *                       if 0 timestamp is automatcally generated
-   * NOTE: ConfigureSendMediaCodec() MUST be called before this function can be invoked
-   *       This ensures the inserted video-frames can be transmitted by the conduit
+   * Function to deliver a capture video frame for encoding and transport.
+   * If the frame's timestamp is 0, it will be automatically generated.
+   *
+   * NOTE: ConfigureSendMediaCodec() must be called before this function can
+   *       be invoked. This ensures the inserted video-frames can be
+   *       transmitted by the conduit.
    */
-  virtual MediaConduitErrorCode SendVideoFrame(const unsigned char* video_frame,
-                                               unsigned int video_frame_length,
-                                               unsigned short width,
-                                               unsigned short height,
-                                               VideoType video_type,
-                                               uint64_t capture_time) = 0;
   virtual MediaConduitErrorCode SendVideoFrame(
     const webrtc::VideoFrame& frame) = 0;
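For reference, a minimal sketch of driving the new single-argument overload. The conduit variable and its prior ConfigureSendMediaCodec() call are assumed context, not part of this patch:

// "conduit" is an already-configured RefPtr<VideoSessionConduit>;
// ConfigureSendMediaCodec() must have succeeded before sending.
rtc::scoped_refptr<webrtc::I420Buffer> buffer =
  webrtc::I420Buffer::Create(640, 480);
webrtc::I420Buffer::SetBlack(buffer);

webrtc::VideoFrame frame(buffer,
                         0, // RTP timestamp; 0 lets the engine generate one
                         0, // render time (ms)
                         webrtc::kVideoRotation_0);
MediaConduitErrorCode err = conduit->SendVideoFrame(frame);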

View file

@@ -1782,50 +1782,6 @@ WebrtcVideoConduit::SelectSendFrameRate(const VideoCodecConfig* codecConfig,
   return new_framerate;
 }

-MediaConduitErrorCode
-WebrtcVideoConduit::SendVideoFrame(const unsigned char* video_buffer,
-                                   unsigned int video_length,
-                                   unsigned short width,
-                                   unsigned short height,
-                                   VideoType video_type,
-                                   uint64_t capture_time)
-{
-  // check for parameter sanity
-  if (!video_buffer || video_length == 0 || width == 0 || height == 0) {
-    CSFLogError(LOGTAG, "%s Invalid Parameters ", __FUNCTION__);
-    MOZ_ASSERT(false);
-    return kMediaConduitMalformedArgument;
-  }
-  MOZ_ASSERT(video_type == VideoType::kVideoI420);
-
-  // Transmission should be enabled before we insert any frames.
-  if (!mEngineTransmitting) {
-    CSFLogError(LOGTAG, "%s Engine not transmitting ", __FUNCTION__);
-    return kMediaConduitSessionNotInited;
-  }
-
-  // insert the frame to video engine in I420 format only
-  const int stride_y = width;
-  const int stride_uv = (width + 1) / 2;
-
-  const uint8_t* buffer_y = video_buffer;
-  const uint8_t* buffer_u = buffer_y + stride_y * height;
-  const uint8_t* buffer_v = buffer_u + stride_uv * ((height + 1) / 2);
-  rtc::Callback0<void> callback_unused;
-  rtc::scoped_refptr<webrtc::WrappedI420Buffer> video_frame_buffer(
-    new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
-      width, height,
-      buffer_y, stride_y,
-      buffer_u, stride_uv,
-      buffer_v, stride_uv,
-      callback_unused));
-
-  webrtc::VideoFrame video_frame(video_frame_buffer, capture_time,
-                                 capture_time, webrtc::kVideoRotation_0); // XXX
-
-  return SendVideoFrame(video_frame);
-}
-
 void
 WebrtcVideoConduit::AddOrUpdateSink(
   rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,

View file

@@ -186,22 +186,13 @@ public:
                             unsigned short sending_height) const;

   /**
-   * Function to deliver a capture video frame for encoding and transport
-   * @param video_frame: pointer to captured video-frame.
-   * @param video_frame_length: size of the frame
-   * @param width, height: dimensions of the frame
-   * @param video_type: Type of the video frame - I420, RAW
-   * @param captured_time: timestamp when the frame was captured.
-   *                       if 0 timestamp is automatcally generated by the engine.
-   *NOTE: ConfigureSendMediaCodec() SHOULD be called before this function can be invoked
-   *       This ensures the inserted video-frames can be transmitted by the conduit
+   * Function to deliver a capture video frame for encoding and transport.
+   * If the frame's timestamp is 0, it will be automatically generated.
+   *
+   * NOTE: ConfigureSendMediaCodec() must be called before this function can
+   *       be invoked. This ensures the inserted video-frames can be
+   *       transmitted by the conduit.
    */
-  virtual MediaConduitErrorCode SendVideoFrame(const unsigned char* video_frame,
-                                               unsigned int video_frame_length,
-                                               unsigned short width,
-                                               unsigned short height,
-                                               VideoType video_type,
-                                               uint64_t capture_time) override;
   virtual MediaConduitErrorCode SendVideoFrame(
     const webrtc::VideoFrame& frame) override;

View file

@@ -52,6 +52,7 @@
 #include "webrtc/base/bind.h"
 #include "webrtc/common_types.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
 #include "webrtc/common_video/include/video_frame_buffer.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
@@ -62,6 +63,14 @@ static_assert((WEBRTC_MAX_SAMPLE_RATE / 100) * sizeof(uint16_t) * 2
               <= AUDIO_SAMPLE_BUFFER_MAX_BYTES,
               "AUDIO_SAMPLE_BUFFER_MAX_BYTES is not large enough");

+// The number of frame buffers VideoFrameConverter may create before returning
+// errors. Sometimes these are released synchronously, but they can also be
+// forwarded all the way to the encoder for asynchronous encoding. With a pool
+// size of 5, we allow 1 buffer for the current conversion and 4 buffers to be
+// queued at the encoder.
+#define CONVERTER_BUFFER_POOL_SIZE 5
+
 using namespace mozilla;
 using namespace mozilla::dom;
 using namespace mozilla::gfx;
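The pool behavior the converter relies on below (CreateBuffer() returns null once CONVERTER_BUFFER_POOL_SIZE buffers are outstanding) can be sketched in isolation. This is an illustrative snippet, not code from the patch:

#include <vector>
#include "webrtc/common_video/include/i420_buffer_pool.h"

void PoolExhaustionSketch()
{
  // Same arguments as mBufferPool(false, CONVERTER_BUFFER_POOL_SIZE):
  // no zero-initialization, at most 5 outstanding buffers.
  webrtc::I420BufferPool pool(false, 5);

  std::vector<rtc::scoped_refptr<webrtc::I420Buffer>> held;
  for (int i = 0; i < 5; ++i) {
    held.push_back(pool.CreateBuffer(640, 480)); // all five succeed
  }

  // A sixth request while all five are still referenced returns null;
  // this is the !buffer path the converter logs and bails out on.
  rtc::scoped_refptr<webrtc::I420Buffer> extra = pool.CreateBuffer(640, 480);

  held.clear();                        // drop our references
  extra = pool.CreateBuffer(640, 480); // a recycled buffer; succeeds again
}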
@@ -82,24 +91,12 @@ class VideoConverterListener
 public:
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VideoConverterListener)

-  virtual void OnVideoFrameConverted(const unsigned char* aVideoFrame,
-                                     unsigned int aVideoFrameLength,
-                                     unsigned short aWidth,
-                                     unsigned short aHeight,
-                                     VideoType aVideoType,
-                                     uint64_t aCaptureTime) = 0;
-
   virtual void OnVideoFrameConverted(const webrtc::VideoFrame& aVideoFrame) = 0;

 protected:
   virtual ~VideoConverterListener() {}
 };

-// I420 buffer size macros
-#define YSIZE(x, y) (CheckedInt<int>(x) * (y))
-#define CRSIZE(x, y) ((((x) + 1) >> 1) * (((y) + 1) >> 1))
-#define I420SIZE(x, y) (YSIZE((x), (y)) + 2 * CRSIZE((x), (y)))
-
 // An async video frame format converter.
 //
 // Input is typically a MediaStream(Track)Listener driven by MediaStreamGraph.
@@ -119,6 +116,7 @@ public:
     , mTaskQueue(
         new AutoTaskQueue(GetMediaThreadPool(MediaThreadType::WEBRTC_DECODER),
                           "VideoFrameConverter"))
+    , mBufferPool(false, CONVERTER_BUFFER_POOL_SIZE)
     , mLastImage(-1) // -1 is not a guaranteed invalid serial. See bug 1262134.
 #ifdef DEBUG
     , mThrottleCount(0)
@@ -131,15 +129,43 @@ public:
   void QueueVideoChunk(const VideoChunk& aChunk, bool aForceBlack)
   {
-    if (aChunk.IsNull()) {
-      return;
-    }
-
-    // We get passed duplicate frames every ~10ms even with no frame change.
-    int32_t serial = aChunk.mFrame.GetImage()->GetSerial();
-    if (serial == mLastImage) {
-      return;
-    }
+    IntSize size = aChunk.mFrame.GetIntrinsicSize();
+    if (size.width == 0 || size.height == 0) {
+      return;
+    }
+
+    if (aChunk.IsNull()) {
+      aForceBlack = true;
+    } else {
+      aForceBlack = aChunk.mFrame.GetForceBlack();
+    }
+
+    int32_t serial;
+    if (aForceBlack) {
+      // Reset the last-img check.
+      // -1 is not a guaranteed invalid serial. See bug 1262134.
+      serial = -1;
+    } else {
+      serial = aChunk.mFrame.GetImage()->GetSerial();
+    }
+
+    const double duplicateMinFps = 1.0;
+    TimeStamp t = aChunk.mTimeStamp;
+    MOZ_ASSERT(!t.IsNull());
+    if (!t.IsNull() &&
+        serial == mLastImage &&
+        !mLastFrameSent.IsNull() &&
+        (t - mLastFrameSent).ToSeconds() < (1.0 / duplicateMinFps)) {
+      // We get passed duplicate frames every ~10ms even with no frame change.
+      // After disabling, or when the source is not producing many frames,
+      // we still want *some* frames to flow to the other side.
+      // It could happen that we drop the packet that carried the first
+      // disabled frame, for instance. Note that this still requires the
+      // application to send a frame, or it doesn't trigger at all.
+      return;
+    }
+    mLastFrameSent = t;
     mLastImage = serial;

     // A throttling limit of 1 allows us to convert 2 frames concurrently.
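The duplicate-frame floor above (duplicateMinFps = 1.0) means a repeated image serial is dropped only if the last frame went out less than a second ago. The same check in isolation, using std::chrono in place of mozilla::TimeStamp (a hypothetical helper, for illustration):

#include <chrono>

// Forward a frame whose image serial matches the last one we sent only
// if at least (1 / duplicateMinFps) seconds have passed, so some frames
// keep flowing even when the source repeats itself.
bool ShouldSendDuplicate(std::chrono::steady_clock::time_point now,
                         std::chrono::steady_clock::time_point lastSent)
{
  constexpr double duplicateMinFps = 1.0;
  const std::chrono::duration<double> elapsed = now - lastSent;
  return elapsed.count() >= (1.0 / duplicateMinFps);
}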
@@ -180,39 +206,16 @@ public:
     }
 #endif

-    bool forceBlack = aForceBlack || aChunk.mFrame.GetForceBlack();
-
-    if (forceBlack) {
-      // Reset the last-img check.
-      // -1 is not a guaranteed invalid serial. See bug 1262134.
-      mLastImage = -1;
-
-      // After disabling, we still want *some* frames to flow to the other side.
-      // It could happen that we drop the packet that carried the first disabled
-      // frame, for instance. Note that this still requires the application to
-      // send a frame, or it doesn't trigger at all.
-      const double disabledMinFps = 1.0;
-      TimeStamp t = aChunk.mTimeStamp;
-      MOZ_ASSERT(!t.IsNull());
-      if (!mDisabledFrameSent.IsNull() &&
-          (t - mDisabledFrameSent).ToSeconds() < (1.0 / disabledMinFps)) {
-        return;
-      }
-      mDisabledFrameSent = t;
-    } else {
-      // This sets it to the Null time.
-      mDisabledFrameSent = TimeStamp();
-    }
-
     ++mLength; // Atomic

     nsCOMPtr<nsIRunnable> runnable =
-      NewRunnableMethod<StoreRefPtrPassByPtr<Image>, bool>(
+      NewRunnableMethod<StoreRefPtrPassByPtr<Image>, IntSize, bool>(
         "VideoFrameConverter::ProcessVideoFrame",
         this,
         &VideoFrameConverter::ProcessVideoFrame,
         aChunk.mFrame.GetImage(),
-        forceBlack);
+        size,
+        aForceBlack);
     nsresult rv = mTaskQueue->Dispatch(runnable.forget());
     MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
     Unused << rv;
@@ -295,38 +298,33 @@ protected:
     }
   }

-  void ProcessVideoFrame(Image* aImage, bool aForceBlack)
+  void ProcessVideoFrame(Image* aImage, IntSize aSize, bool aForceBlack)
   {
     --mLength; // Atomic
     MOZ_ASSERT(mLength >= 0);

     if (aForceBlack) {
-      IntSize size = aImage->GetSize();
-      CheckedInt<int> yPlaneLen = YSIZE(size.width, size.height);
-      // doesn't need to be CheckedInt, any overflow will be caught by YSIZE
-      int cbcrPlaneLen = 2 * CRSIZE(size.width, size.height);
-      CheckedInt<int> length = yPlaneLen + cbcrPlaneLen;
-
-      if (!yPlaneLen.isValid() || !length.isValid()) {
-        return;
-      }
-
-      // Send a black image.
-      auto pixelData = MakeUniqueFallible<uint8_t[]>(length.value());
-      if (pixelData) {
-        // YCrCb black = 0x10 0x80 0x80
-        memset(pixelData.get(), 0x10, yPlaneLen.value());
-        // Fill Cb/Cr planes
-        memset(pixelData.get() + yPlaneLen.value(), 0x80, cbcrPlaneLen);
-
-        CSFLogDebug(LOGTAG, "Sending a black video frame");
-        VideoFrameConverted(Move(pixelData),
-                            length.value(),
-                            size.width,
-                            size.height,
-                            mozilla::kVideoI420,
-                            0);
-      }
+      // Send a black image.
+      rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+        mBufferPool.CreateBuffer(aSize.width, aSize.height);
+      if (!buffer) {
+        MOZ_DIAGNOSTIC_ASSERT(false, "Buffers not leaving scope except for "
+                                     "reconfig, should never leak");
+        CSFLogWarn(LOGTAG, "Creating a buffer for a black video frame failed");
+        return;
+      }
+
+      CSFLogDebug(LOGTAG, "Sending a black video frame");
+      webrtc::I420Buffer::SetBlack(buffer);
+
+      webrtc::VideoFrame frame(buffer,
+                               0, 0, // not setting timestamps
+                               webrtc::kVideoRotation_0);
+      VideoFrameConverted(frame);
+      return;
+    }
+
+    if (!aImage) {
+      MOZ_ASSERT_UNREACHABLE("Must have image if not forcing black");
       return;
     }
@@ -386,22 +384,17 @@ protected:
       return;
     }

-    IntSize size = aImage->GetSize();
-    // these don't need to be CheckedInt, any overflow will be caught by YSIZE
-    int half_width = (size.width + 1) >> 1;
-    int half_height = (size.height + 1) >> 1;
-    int c_size = half_width * half_height;
-    CheckedInt<int> buffer_size = YSIZE(size.width, size.height) + 2 * c_size;
-
-    if (!buffer_size.isValid()) {
+    if (aImage->GetSize() != aSize) {
+      MOZ_DIAGNOSTIC_ASSERT(false, "Unexpected intended size");
       return;
     }

-    auto yuv_scoped = MakeUniqueFallible<uint8[]>(buffer_size.value());
-    if (!yuv_scoped) {
+    rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+      mBufferPool.CreateBuffer(aSize.width, aSize.height);
+    if (!buffer) {
+      CSFLogWarn(LOGTAG, "Creating a buffer for a converted video frame failed");
       return;
     }
-    uint8* yuv = yuv_scoped.get();

     DataSourceSurface::ScopedMap map(data, DataSourceSurface::READ);
     if (!map.IsMapped()) {
@@ -415,33 +408,31 @@ protected:
     }

     int rv;
-    int cb_offset = YSIZE(size.width, size.height).value();
-    int cr_offset = cb_offset + c_size;
     switch (surf->GetFormat()) {
       case SurfaceFormat::B8G8R8A8:
       case SurfaceFormat::B8G8R8X8:
         rv = libyuv::ARGBToI420(static_cast<uint8*>(map.GetData()),
                                 map.GetStride(),
-                                yuv,
-                                size.width,
-                                yuv + cb_offset,
-                                half_width,
-                                yuv + cr_offset,
-                                half_width,
-                                size.width,
-                                size.height);
+                                buffer->MutableDataY(),
+                                buffer->StrideY(),
+                                buffer->MutableDataU(),
+                                buffer->StrideU(),
+                                buffer->MutableDataV(),
+                                buffer->StrideV(),
+                                aSize.width,
+                                aSize.height);
         break;
       case SurfaceFormat::R5G6B5_UINT16:
         rv = libyuv::RGB565ToI420(static_cast<uint8*>(map.GetData()),
                                   map.GetStride(),
-                                  yuv,
-                                  size.width,
-                                  yuv + cb_offset,
-                                  half_width,
-                                  yuv + cr_offset,
-                                  half_width,
-                                  size.width,
-                                  size.height);
+                                  buffer->MutableDataY(),
+                                  buffer->StrideY(),
+                                  buffer->MutableDataU(),
+                                  buffer->StrideU(),
+                                  buffer->MutableDataV(),
+                                  buffer->StrideV(),
+                                  aSize.width,
+                                  aSize.height);
         break;
       default:
         CSFLogError(LOGTAG,
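Passing buffer->StrideY()/StrideU()/StrideV() rather than width and half_width matters because a pooled buffer's stride may exceed the visible plane width. A small sketch of the invariant, with assumed odd dimensions:

rtc::scoped_refptr<webrtc::I420Buffer> buf =
  webrtc::I420Buffer::Create(17, 9); // odd sizes: chroma planes are 9x5
// Strides are at least the plane widths, but may be larger for alignment.
MOZ_ASSERT(buf->StrideY() >= buf->width());
MOZ_ASSERT(buf->StrideU() >= (buf->width() + 1) / 2);
MOZ_ASSERT(buf->StrideV() >= (buf->width() + 1) / 2);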
@@ -459,20 +450,19 @@ protected:
     CSFLogDebug(LOGTAG,
                 "Sending an I420 video frame converted from %s",
                 Stringify(surf->GetFormat()).c_str());
-    VideoFrameConverted(Move(yuv_scoped),
-                        buffer_size.value(),
-                        size.width,
-                        size.height,
-                        mozilla::kVideoI420,
-                        0);
+    webrtc::VideoFrame frame(buffer,
+                             0, 0, // not setting timestamps
+                             webrtc::kVideoRotation_0);
+    VideoFrameConverted(frame);
   }

   Atomic<int32_t, Relaxed> mLength;
   const RefPtr<AutoTaskQueue> mTaskQueue;
+  webrtc::I420BufferPool mBufferPool;

   // Written and read from the queueing thread (normally MSG).
   int32_t mLastImage; // serial number of last Image
-  TimeStamp mDisabledFrameSent; // The time we sent the last disabled frame.
+  TimeStamp mLastFrameSent; // The time we sent the last frame.

 #ifdef DEBUG
   uint32_t mThrottleCount;
   uint32_t mThrottleRecord;
@@ -1358,22 +1348,6 @@ public:
     mConverter = aConverter;
   }

-  void OnVideoFrameConverted(const unsigned char* aVideoFrame,
-                             unsigned int aVideoFrameLength,
-                             unsigned short aWidth,
-                             unsigned short aHeight,
-                             VideoType aVideoType,
-                             uint64_t aCaptureTime)
-  {
-    MOZ_RELEASE_ASSERT(mConduit->type() == MediaSessionConduit::VIDEO);
-    static_cast<VideoSessionConduit*>(mConduit.get())
-      ->SendVideoFrame(aVideoFrame,
-                       aVideoFrameLength,
-                       aWidth,
-                       aHeight,
-                       aVideoType,
-                       aCaptureTime);
-  }
-
   void OnVideoFrameConverted(const webrtc::VideoFrame& aVideoFrame)
   {
@@ -1438,26 +1412,6 @@ public:
     mListener = nullptr;
   }

-  void OnVideoFrameConverted(const unsigned char* aVideoFrame,
-                             unsigned int aVideoFrameLength,
-                             unsigned short aWidth,
-                             unsigned short aHeight,
-                             VideoType aVideoType,
-                             uint64_t aCaptureTime) override
-  {
-    MutexAutoLock lock(mMutex);
-
-    if (!mListener) {
-      return;
-    }
-
-    mListener->OnVideoFrameConverted(aVideoFrame,
-                                     aVideoFrameLength,
-                                     aWidth,
-                                     aHeight,
-                                     aVideoType,
-                                     aCaptureTime);
-  }
-
   void OnVideoFrameConverted(const webrtc::VideoFrame& aVideoFrame) override
   {