Bug 1388219 - down scale camera output frame to the target capability. r=jib

MozReview-Commit-ID: BpAhwYrgHtA

--HG--
extra : rebase_source : 0213c8c820765898a0509ec7845c487d7fa0c230
Munro Mengjue Chiang 2017-11-17 23:48:49 +08:00
Parent f02f21154e
Commit aa4f8e8705
5 changed files with 155 additions and 91 deletions

View file

@@ -52,7 +52,7 @@ ResolutionFeasibilityDistance(int32_t candidate, int32_t requested)
   if (candidate >= requested) {
     distance = (candidate - requested) * 1000 / std::max(candidate, requested);
   } else {
-    distance = (UINT32_MAX / 2) + (requested - candidate) *
+    distance = 10000 + (requested - candidate) *
       1000 / std::max(candidate, requested);
   }
   return distance;
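
The new penalty keeps undersized candidates on the same scale as oversized ones instead of pushing them near UINT32_MAX/2, so summed per-dimension distances stay comparable. A standalone sketch of the landed scoring, with hypothetical requested/candidate values:

#include <algorithm>
#include <cstdint>
#include <iostream>

// Same scoring as the hunk above: a smaller-than-requested candidate gets a
// flat 10000 penalty plus a proportional term, so it still sorts sensibly
// against larger-than-requested candidates.
static uint32_t ResolutionFeasibilityDistance(int32_t candidate, int32_t requested) {
  uint32_t distance;
  if (candidate >= requested) {
    distance = (candidate - requested) * 1000 / std::max(candidate, requested);
  } else {
    distance = 10000 + (requested - candidate) * 1000 / std::max(candidate, requested);
  }
  return distance;
}

int main() {
  // Requested width 1280: a 1920 candidate scores 333, a 640 candidate
  // scores 10500 -- penalized, but no longer ~UINT32_MAX/2, so an
  // undersized mode can still compete instead of being effectively banned.
  std::cout << ResolutionFeasibilityDistance(1920, 1280) << "\n";  // 333
  std::cout << ResolutionFeasibilityDistance(640, 1280) << "\n";   // 10500
}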
@@ -862,14 +862,14 @@ CamerasParent::RecvStartCapture(const CaptureEngine& aCapEngine,
   capability.codecType = static_cast<webrtc::VideoCodecType>(ipcCaps.codecType());
   capability.interlaced = ipcCaps.interlaced();
-  if (aCapEngine == CameraEngine) {
 #ifdef DEBUG
-    auto deviceUniqueID = sDeviceUniqueIDs.find(capnum);
-    MOZ_ASSERT(deviceUniqueID == sDeviceUniqueIDs.end());
+  auto deviceUniqueID = sDeviceUniqueIDs.find(capnum);
+  MOZ_ASSERT(deviceUniqueID == sDeviceUniqueIDs.end());
 #endif
-    sDeviceUniqueIDs.emplace(capnum, cap.VideoCapture()->CurrentDeviceName());
-    sAllRequestedCapabilities.emplace(capnum, capability);
+  sDeviceUniqueIDs.emplace(capnum, cap.VideoCapture()->CurrentDeviceName());
+  sAllRequestedCapabilities.emplace(capnum, capability);
 
+  if (aCapEngine == CameraEngine) {
     for (const auto &it : sDeviceUniqueIDs) {
       if (strcmp(it.second, cap.VideoCapture()->CurrentDeviceName()) == 0) {
         capability.width = std::max(
@@ -908,6 +908,16 @@ CamerasParent::RecvStartCapture(const CaptureEngine& aCapEngine,
     }
     MOZ_ASSERT(minIdx != -1);
     capability = candidateCapabilities->second[minIdx];
+  } else if (aCapEngine == ScreenEngine ||
+             aCapEngine == BrowserEngine ||
+             aCapEngine == WinEngine ||
+             aCapEngine == AppEngine) {
+    for (const auto &it : sDeviceUniqueIDs) {
+      if (strcmp(it.second, cap.VideoCapture()->CurrentDeviceName()) == 0) {
+        capability.maxFPS = std::max(
+          capability.maxFPS, sAllRequestedCapabilities[it.first].maxFPS);
+      }
+    }
   }
   error = cap.VideoCapture()->StartCapture(capability);
@@ -949,16 +959,14 @@ CamerasParent::StopCapture(const CaptureEngine& aCapEngine,
         mCallbacks[i - 1]->mStreamId == (uint32_t)capnum) {
       CallbackHelper* cbh = mCallbacks[i-1];
-      engine->WithEntry(capnum,[cbh, &capnum, &aCapEngine](VideoEngine::CaptureEntry& cap){
+      engine->WithEntry(capnum,[cbh, &capnum](VideoEngine::CaptureEntry& cap){
         if (cap.VideoCapture()) {
           cap.VideoCapture()->DeRegisterCaptureDataCallback(
             static_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(cbh));
           cap.VideoCapture()->StopCaptureIfAllClientsClose();
-          if (aCapEngine == CameraEngine) {
-            sDeviceUniqueIDs.erase(capnum);
-            sAllRequestedCapabilities.erase(capnum);
-          }
+          sDeviceUniqueIDs.erase(capnum);
+          sAllRequestedCapabilities.erase(capnum);
         }
       });
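
With the two maps above now maintained for every engine, a shared physical device merges all clients' requests when a capture (re)starts: camera captures take the per-dimension maximum, while screen/window/app captures merge only maxFPS. A condensed sketch of that merge with simplified stand-in types (the real camera path additionally snaps the merged request to the nearest actual device mode via ResolutionFeasibilityDistance):

#include <algorithm>
#include <cstdint>
#include <map>
#include <string>

// Simplified stand-ins for webrtc::CaptureCapability and the static maps
// keyed by capture number (sDeviceUniqueIDs / sAllRequestedCapabilities).
struct Capability { int32_t width = 0, height = 0, maxFPS = 0; };

// Merge every client's requested capability for the same physical device so
// a single shared capture can satisfy all of them.
Capability MergeForDevice(const std::map<int, std::string>& deviceIds,
                          std::map<int, Capability>& requested,
                          const std::string& device,
                          Capability capability,
                          bool isCamera) {
  for (const auto& it : deviceIds) {
    if (it.second != device) {
      continue;  // a different physical device; not merged
    }
    if (isCamera) {
      capability.width = std::max(capability.width, requested[it.first].width);
      capability.height = std::max(capability.height, requested[it.first].height);
    }
    capability.maxFPS = std::max(capability.maxFPS, requested[it.first].maxFPS);
  }
  return capability;
}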

View file

@@ -25,10 +25,20 @@ bool MediaEngineCameraVideoSource::AppendToTrack(SourceMediaStream* aSource,
                                                  const PrincipalHandle& aPrincipalHandle)
 {
   MOZ_ASSERT(aSource);
-  MOZ_ASSERT(aImage);
+  if (!aImage) {
+    return 0;
+  }
 
   VideoSegment segment;
   RefPtr<layers::Image> image = aImage;
-  IntSize size(image ? mWidth : 0, image ? mHeight : 0);
+  IntSize size = image->GetSize();
+  if (!size.width || !size.height) {
+    return 0;
+  }
 
   segment.AppendFrame(image.forget(), delta, size, aPrincipalHandle);
 
   // This is safe from any thread, and is safe if the track is Finished
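
Since each client can now receive a differently downscaled frame, the cached mWidth/mHeight no longer describe every image, so the size must come from the frame itself and degenerate frames are skipped. A minimal sketch of the guard order with stand-in types (not the real Gecko classes):

#include <cstdint>

// Stand-ins; the real code uses mozilla::gfx::IntSize and layers::Image.
struct IntSize { int32_t width = 0, height = 0; };
struct Image { IntSize size; IntSize GetSize() const { return size; } };

// Null frames are skipped before GetSize() is called, and zero-sized frames
// are dropped rather than appended, mirroring AppendToTrack above.
bool AppendFrameSketch(Image* aImage) {
  if (!aImage) {
    return false;            // nothing to append
  }
  IntSize size = aImage->GetSize();
  if (!size.width || !size.height) {
    return false;            // an empty frame would misbehave downstream
  }
  // ... append (image, size) to the VideoSegment here ...
  return true;
}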

View file

@@ -140,6 +140,7 @@ protected:
   nsTArray<RefPtr<SourceMediaStream>> mSources; // When this goes empty, we shut down HW
   nsTArray<PrincipalHandle> mPrincipalHandles; // Directly mapped to mSources.
   RefPtr<layers::Image> mImage;
+  nsTArray<RefPtr<layers::Image>> mImages;
   nsTArray<webrtc::CaptureCapability> mTargetCapabilities;
   nsTArray<uint64_t> mHandleIds;
   RefPtr<layers::ImageContainer> mImageContainer;

View file

@@ -10,6 +10,9 @@
 #include "nsIPrefService.h"
 #include "MediaTrackConstraints.h"
 #include "CamerasChild.h"
+#include "VideoFrameUtils.h"
+#include "webrtc/api/video/i420_buffer.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 
 extern mozilla::LogModule* GetMediaManagerLog();
 #define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
@@ -84,6 +87,7 @@ MediaEngineRemoteVideoSource::Shutdown()
       MOZ_ASSERT(mPrincipalHandles.IsEmpty());
       MOZ_ASSERT(mTargetCapabilities.IsEmpty());
       MOZ_ASSERT(mHandleIds.IsEmpty());
+      MOZ_ASSERT(mImages.IsEmpty());
       break;
     }
     source = mSources[0];
@@ -132,6 +136,7 @@ MediaEngineRemoteVideoSource::Allocate(
     MOZ_ASSERT(mPrincipalHandles.IsEmpty());
     MOZ_ASSERT(mTargetCapabilities.IsEmpty());
     MOZ_ASSERT(mHandleIds.IsEmpty());
+    MOZ_ASSERT(mImages.IsEmpty());
     LOG(("Video device %d reallocated", mCaptureIndex));
   } else {
     LOG(("Video device %d allocated shared", mCaptureIndex));
@@ -174,16 +179,21 @@ MediaEngineRemoteVideoSource::Start(SourceMediaStream* aStream, TrackID aID,
     return NS_ERROR_FAILURE;
   }
 
+  mImageContainer =
+    layers::LayerManager::CreateImageContainer(layers::ImageContainer::ASYNCHRONOUS);
+
   {
     MonitorAutoLock lock(mMonitor);
     mSources.AppendElement(aStream);
     mPrincipalHandles.AppendElement(aPrincipalHandle);
     mTargetCapabilities.AppendElement(mTargetCapability);
     mHandleIds.AppendElement(mHandleId);
+    mImages.AppendElement(mImageContainer->CreatePlanarYCbCrImage());
 
     MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
     MOZ_ASSERT(mSources.Length() == mTargetCapabilities.Length());
     MOZ_ASSERT(mSources.Length() == mHandleIds.Length());
+    MOZ_ASSERT(mSources.Length() == mImages.Length());
   }
 
   aStream->AddTrack(aID, 0, new VideoSegment(), SourceMediaStream::ADDTRACK_QUEUED);
@@ -191,8 +201,6 @@ MediaEngineRemoteVideoSource::Start(SourceMediaStream* aStream, TrackID aID,
   if (mState == kStarted) {
     return NS_OK;
   }
-  mImageContainer =
-    layers::LayerManager::CreateImageContainer(layers::ImageContainer::ASYNCHRONOUS);
 
   mState = kStarted;
   mTrackID = aID;
@@ -231,10 +239,12 @@ MediaEngineRemoteVideoSource::Stop(mozilla::SourceMediaStream* aSource,
     MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
     MOZ_ASSERT(mSources.Length() == mTargetCapabilities.Length());
     MOZ_ASSERT(mSources.Length() == mHandleIds.Length());
+    MOZ_ASSERT(mSources.Length() == mImages.Length());
     mSources.RemoveElementAt(i);
     mPrincipalHandles.RemoveElementAt(i);
     mTargetCapabilities.RemoveElementAt(i);
     mHandleIds.RemoveElementAt(i);
+    mImages.RemoveElementAt(i);
 
     aSource->EndTrack(aID);
@@ -318,7 +328,12 @@ MediaEngineRemoteVideoSource::UpdateSingleSource(
   }
 
   if (index != mHandleIds.NoIndex) {
+    MonitorAutoLock lock(mMonitor);
     mTargetCapabilities[index] = mTargetCapability;
+    MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
+    MOZ_ASSERT(mSources.Length() == mTargetCapabilities.Length());
+    MOZ_ASSERT(mSources.Length() == mHandleIds.Length());
+    MOZ_ASSERT(mSources.Length() == mImages.Length());
   }
 
   if (!ChooseCapability(aNetConstraints, aPrefs, aDeviceId, mCapability,
@@ -385,18 +400,22 @@ MediaEngineRemoteVideoSource::NotifyPull(MediaStreamGraph* aGraph,
                                          SourceMediaStream* aSource,
                                          TrackID aID, StreamTime aDesiredTime,
                                          const PrincipalHandle& aPrincipalHandle)
 {
   VideoSegment segment;
+  StreamTime delta = 0;
+  size_t i;
 
   MonitorAutoLock lock(mMonitor);
   if (mState != kStarted) {
     return;
   }
-  StreamTime delta = aDesiredTime - aSource->GetEndOfAppendedData(aID);
 
+  i = mSources.IndexOf(aSource);
+  if (i == mSources.NoIndex) {
+    return;
+  }
+
+  delta = aDesiredTime - aSource->GetEndOfAppendedData(aID);
   if (delta > 0) {
     // nullptr images are allowed
-    AppendToTrack(aSource, mImage, aID, delta, aPrincipalHandle);
+    AppendToTrack(aSource, mImages[i], aID, delta, aPrincipalHandle);
   }
 }
@@ -419,11 +438,12 @@ MediaEngineRemoteVideoSource::FrameSizeChange(unsigned int w, unsigned int h)
 }
 
 int
-MediaEngineRemoteVideoSource::DeliverFrame(uint8_t* aBuffer ,
+MediaEngineRemoteVideoSource::DeliverFrame(uint8_t* aBuffer,
                                            const camera::VideoFrameProperties& aProps)
 {
+  MonitorAutoLock lock(mMonitor);
   // Check for proper state.
-  if (mState != kStarted) {
+  if (mState != kStarted || !mImageContainer) {
     LOG(("DeliverFrame: video not started"));
     return 0;
   }
@@ -431,51 +451,114 @@ MediaEngineRemoteVideoSource::DeliverFrame(uint8_t* aBuffer ,
   // Update the dimensions
   FrameSizeChange(aProps.width(), aProps.height());
 
-  layers::PlanarYCbCrData data;
-  RefPtr<layers::PlanarYCbCrImage> image;
-  {
-    // We grab the lock twice, but don't hold it across the (long) CopyData
-    MonitorAutoLock lock(mMonitor);
-    if (!mImageContainer) {
-      LOG(("DeliverFrame() called after Stop()!"));
-      return 0;
-    }
-
-    // Create a video frame and append it to the track.
-    image = mImageContainer->CreatePlanarYCbCrImage();
+  MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
+  MOZ_ASSERT(mSources.Length() == mTargetCapabilities.Length());
+  MOZ_ASSERT(mSources.Length() == mHandleIds.Length());
+  MOZ_ASSERT(mSources.Length() == mImages.Length());
+
+  for (uint32_t i = 0; i < mTargetCapabilities.Length(); i++) {
+    int32_t req_max_width = mTargetCapabilities[i].width & 0xffff;
+    int32_t req_max_height = mTargetCapabilities[i].height & 0xffff;
+    int32_t req_ideal_width = (mTargetCapabilities[i].width >> 16) & 0xffff;
+    int32_t req_ideal_height = (mTargetCapabilities[i].height >> 16) & 0xffff;
+
+    int32_t dest_max_width = std::min(req_max_width, mWidth);
+    int32_t dest_max_height = std::min(req_max_height, mHeight);
+    // This logic works for both the camera and the screen-sharing case: in
+    // the camera case req_ideal_width and req_ideal_height are 0, so the
+    // following lines set dst_width to dest_max_width and dst_height to
+    // dest_max_height.
+    int32_t dst_width = std::min(req_ideal_width > 0 ? req_ideal_width : mWidth, dest_max_width);
+    int32_t dst_height = std::min(req_ideal_height > 0 ? req_ideal_height : mHeight, dest_max_height);
+
+    int dst_stride_y = dst_width;
+    int dst_stride_uv = (dst_width + 1) / 2;
+
+    camera::VideoFrameProperties properties;
+    uint8_t* frame;
+    bool needReScale = !((dst_width == mWidth && dst_height == mHeight) ||
+                         (dst_width > mWidth || dst_height > mHeight));
+
+    if (!needReScale) {
+      dst_width = mWidth;
+      dst_height = mHeight;
+      frame = aBuffer;
+    } else {
+      rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer;
+      i420Buffer = webrtc::I420Buffer::Create(mWidth, mHeight, mWidth,
+                                              (mWidth + 1) / 2, (mWidth + 1) / 2);
+      const int conversionResult = webrtc::ConvertToI420(webrtc::kI420,
+                                                         aBuffer,
+                                                         0, 0,  // No cropping
+                                                         mWidth, mHeight,
+                                                         mWidth * mHeight * 3 / 2,
+                                                         webrtc::kVideoRotation_0,
+                                                         i420Buffer.get());
+      webrtc::VideoFrame captureFrame(i420Buffer, 0, 0, webrtc::kVideoRotation_0);
+      if (conversionResult < 0) {
+        return 0;
+      }
+
+      rtc::scoped_refptr<webrtc::I420Buffer> scaledBuffer;
+      scaledBuffer = webrtc::I420Buffer::Create(dst_width, dst_height, dst_stride_y,
+                                                dst_stride_uv, dst_stride_uv);
+      scaledBuffer->CropAndScaleFrom(*captureFrame.video_frame_buffer().get());
+      webrtc::VideoFrame scaledFrame(scaledBuffer, 0, 0, webrtc::kVideoRotation_0);
+
+      VideoFrameUtils::InitFrameBufferProperties(scaledFrame, properties);
+      frame = new unsigned char[properties.bufferSize()];
+      if (!frame) {
+        return 0;
+      }
+      VideoFrameUtils::CopyVideoFrameBuffers(frame,
+                                             properties.bufferSize(), scaledFrame);
+    }
 
-    uint8_t* frame = static_cast<uint8_t*> (aBuffer);
+    // Create a video frame and append it to the track.
+    RefPtr<layers::PlanarYCbCrImage> image = mImageContainer->CreatePlanarYCbCrImage();
+
     const uint8_t lumaBpp = 8;
     const uint8_t chromaBpp = 4;
 
+    layers::PlanarYCbCrData data;
     // Take lots of care to round up!
     data.mYChannel = frame;
-    data.mYSize = IntSize(mWidth, mHeight);
-    data.mYStride = (mWidth * lumaBpp + 7)/ 8;
-    data.mCbCrStride = (mWidth * chromaBpp + 7) / 8;
-    data.mCbChannel = frame + mHeight * data.mYStride;
-    data.mCrChannel = data.mCbChannel + ((mHeight+1)/2) * data.mCbCrStride;
-    data.mCbCrSize = IntSize((mWidth+1)/ 2, (mHeight+1)/ 2);
+    data.mYSize = IntSize(dst_width, dst_height);
+    data.mYStride = (dst_width * lumaBpp + 7) / 8;
+    data.mCbCrStride = (dst_width * chromaBpp + 7) / 8;
+    data.mCbChannel = frame + dst_height * data.mYStride;
+    data.mCrChannel = data.mCbChannel + ((dst_height + 1) / 2) * data.mCbCrStride;
+    data.mCbCrSize = IntSize((dst_width + 1) / 2, (dst_height + 1) / 2);
     data.mPicX = 0;
     data.mPicY = 0;
-    data.mPicSize = IntSize(mWidth, mHeight);
+    data.mPicSize = IntSize(dst_width, dst_height);
     data.mStereoMode = StereoMode::MONO;
-  }
 
-  if (!image->CopyData(data)) {
-    MOZ_ASSERT(false);
-    return 0;
-  }
+    if (!image->CopyData(data)) {
+      MOZ_ASSERT(false);
+      return 0;
+    }
+
+    if (needReScale && frame) {
+      delete [] frame;
+      frame = nullptr;
+    }
 
-  MonitorAutoLock lock(mMonitor);
 #ifdef DEBUG
-  static uint32_t frame_num = 0;
-  LOGFRAME(("frame %d (%dx%d); timeStamp %u, ntpTimeMs %" PRIu64 ", renderTimeMs %" PRIu64,
-            frame_num++, mWidth, mHeight,
-            aProps.timeStamp(), aProps.ntpTimeMs(), aProps.renderTimeMs()));
+    static uint32_t frame_num = 0;
+    LOGFRAME(("frame %d (%dx%d); timeStamp %u, ntpTimeMs %" PRIu64 ", renderTimeMs %" PRIu64,
+              frame_num++, mWidth, mHeight,
+              aProps.timeStamp(), aProps.ntpTimeMs(), aProps.renderTimeMs()));
 #endif
 
-  // implicitly releases last image
-  mImage = image.forget();
+    // implicitly releases last image
+    mImages[i] = image.forget();
+  }
 
   // We'll push the frame into the MSG on the next NotifyPull. This will avoid
   // swamping the MSG with frames should it be taking longer than normal to run
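
The loop above depends on a packing convention: for screen/window capture the constraint code stores the ideal dimensions in the upper 16 bits of CaptureCapability::width/height and the maximums in the lower 16 bits, while cameras leave the ideal bits zero. A worked sketch of the unpack-and-clamp step, with hypothetical sizes:

#include <algorithm>
#include <cstdint>
#include <cstdio>

// Unpack a dimension packed as (ideal << 16) | max and clamp the destination
// size the way DeliverFrame above does. Sample values are hypothetical.
int main() {
  // Screen share: ideal 1280x720, max 1920x1080, actual source 2560x1440.
  int32_t packed_w = (1280 << 16) | 1920, packed_h = (720 << 16) | 1080;
  int32_t src_w = 2560, src_h = 1440;

  int32_t req_max_w = packed_w & 0xffff;            // 1920
  int32_t req_ideal_w = (packed_w >> 16) & 0xffff;  // 1280
  int32_t req_max_h = packed_h & 0xffff;            // 1080
  int32_t req_ideal_h = (packed_h >> 16) & 0xffff;  // 720

  int32_t dest_max_w = std::min(req_max_w, src_w);  // 1920
  int32_t dest_max_h = std::min(req_max_h, src_h);  // 1080
  // Ideal wins when set; a camera (ideal == 0) would get dest_max_*.
  int32_t dst_w = std::min(req_ideal_w > 0 ? req_ideal_w : src_w, dest_max_w);  // 1280
  int32_t dst_h = std::min(req_ideal_h > 0 ? req_ideal_h : src_h, dest_max_h);  // 720

  // Only a strict downscale triggers the I420 rescale path; equal or larger
  // targets deliver the original buffer untouched.
  bool needReScale = !((dst_w == src_w && dst_h == src_h) ||
                       (dst_w > src_w || dst_h > src_h));
  std::printf("%dx%d -> %dx%d, rescale=%d\n", src_w, src_h, dst_w, dst_h, needReScale);
}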

View file

@@ -582,45 +582,7 @@ int32_t DesktopCaptureImpl::IncomingFrame(uint8_t* videoFrame,
       return -1;
     }
 
-    int32_t req_max_width = _requestedCapability.width & 0xffff;
-    int32_t req_max_height = _requestedCapability.height & 0xffff;
-    int32_t req_ideal_width = (_requestedCapability.width >> 16) & 0xffff;
-    int32_t req_ideal_height = (_requestedCapability.height >> 16) & 0xffff;
-
-    int32_t dest_max_width = std::min(req_max_width, target_width);
-    int32_t dest_max_height = std::min(req_max_height, target_height);
-    int32_t dst_width = std::min(req_ideal_width > 0 ? req_ideal_width : target_width, dest_max_width);
-    int32_t dst_height = std::min(req_ideal_height > 0 ? req_ideal_height : target_height, dest_max_height);
-
-    // scale to average of portrait and landscape
-    float scale_width = (float)dst_width / (float)target_width;
-    float scale_height = (float)dst_height / (float)target_height;
-    float scale = (scale_width + scale_height) / 2;
-    dst_width = (int)(scale * target_width);
-    dst_height = (int)(scale * target_height);
-
-    // if scaled rectangle exceeds max rectangle, scale to minimum of portrait and landscape
-    if (dst_width > dest_max_width || dst_height > dest_max_height) {
-      scale_width = (float)dest_max_width / (float)dst_width;
-      scale_height = (float)dest_max_height / (float)dst_height;
-      scale = std::min(scale_width, scale_height);
-      dst_width = (int)(scale * dst_width);
-      dst_height = (int)(scale * dst_height);
-    }
-
-    int dst_stride_y = dst_width;
-    int dst_stride_uv = (dst_width + 1) / 2;
-    if (dst_width == target_width && dst_height == target_height) {
-      DeliverCapturedFrame(captureFrame, captureTime);
-    } else {
-      rtc::scoped_refptr<webrtc::I420Buffer> buffer;
-      buffer = I420Buffer::Create(dst_width, dst_height, dst_stride_y,
-                                  dst_stride_uv, dst_stride_uv);
-
-      buffer->ScaleFrom(*captureFrame.video_frame_buffer().get());
-      webrtc::VideoFrame scaledFrame(buffer, 0, 0, kVideoRotation_0);
-      DeliverCapturedFrame(scaledFrame, captureTime);
-    }
+    DeliverCapturedFrame(captureFrame, captureTime);
   } else {
     assert(false);
     return -1;
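
This hunk deletes DesktopCaptureImpl's one-size-fits-all scaling, since DeliverFrame above now downscales per client with CropAndScaleFrom. For reference, the removed heuristic scaled by the average of the width and height ratios, then clamped if that overshot the max rectangle; a standalone sketch with hypothetical numbers:

#include <algorithm>
#include <cstdio>

// The heuristic deleted above: average the two axis ratios to preserve the
// source aspect, then shrink again if the result exceeds the max rectangle.
// Values below are hypothetical.
int main() {
  int target_w = 2560, target_h = 1440;   // captured desktop size
  int dst_w = 1280, dst_h = 1080;         // clamped request (distorts aspect)

  float scale_w = (float)dst_w / target_w;   // 0.5
  float scale_h = (float)dst_h / target_h;   // 0.75
  float scale = (scale_w + scale_h) / 2;     // 0.625, splits the difference
  dst_w = (int)(scale * target_w);           // 1600
  dst_h = (int)(scale * target_h);           // 900

  int dest_max_w = 1920, dest_max_h = 1080;
  if (dst_w > dest_max_w || dst_h > dest_max_h) {
    scale = std::min((float)dest_max_w / dst_w, (float)dest_max_h / dst_h);
    dst_w = (int)(scale * dst_w);
    dst_h = (int)(scale * dst_h);
  }
  std::printf("%dx%d\n", dst_w, dst_h);  // 1600x900 here; fits the max box
}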