Bug 938034 - Enable new gUM recording callback. r=roc

Alfredo Yang 2014-12-15 01:01:00 -05:00
Parent c623645cee
Commit f089a2fa04
7 changed files with 199 additions and 29 deletions

View file

@@ -841,6 +841,12 @@ nsGonkCameraControl::SetThumbnailSizeImpl(const Size& aSize)
return SetAndPush(CAMERA_PARAM_THUMBNAILSIZE, size);
}
android::sp<android::GonkCameraHardware>
nsGonkCameraControl::GetCameraHw()
{
return mCameraHw;
}
nsresult
nsGonkCameraControl::SetThumbnailSize(const Size& aSize)
{

View file

@@ -32,6 +32,7 @@ namespace android {
class GonkCameraHardware;
class MediaProfiles;
class GonkRecorder;
class GonkCameraSource;
}
namespace mozilla {
@@ -152,6 +153,9 @@ protected:
nsresult UpdateThumbnailSize();
nsresult SetThumbnailSizeImpl(const Size& aSize);
friend class android::GonkCameraSource;
android::sp<android::GonkCameraHardware> GetCameraHw();
int32_t RationalizeRotation(int32_t aRotation);
uint32_t mCameraId;

View file

@@ -46,6 +46,7 @@
#include "GonkCameraSource.h"
#include "GonkCameraListener.h"
#include "GonkCameraHwMgr.h"
#include "ICameraControl.h"
using namespace mozilla;
@@ -157,6 +158,16 @@ GonkCameraSource *GonkCameraSource::Create(
return source;
}
GonkCameraSource *GonkCameraSource::Create(
ICameraControl* aControl,
Size videoSize,
int32_t frameRate)
{
mozilla::nsGonkCameraControl* control =
static_cast<mozilla::nsGonkCameraControl*>(aControl);
return Create(control->GetCameraHw(), videoSize, frameRate, false);
}
GonkCameraSource::GonkCameraSource(
const sp<GonkCameraHardware>& aCameraHw,
Size videoSize,
@@ -596,6 +607,10 @@ status_t GonkCameraSource::reset() {
}
releaseCamera();
if (mDirectBufferListener.get()) {
mDirectBufferListener = nullptr;
}
if (mCollectStats) {
CS_LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
@@ -652,6 +667,14 @@ void GonkCameraSource::signalBufferReturned(MediaBuffer *buffer) {
CHECK(!"signalBufferReturned: bogus buffer");
}
status_t GonkCameraSource::AddDirectBufferListener(DirectBufferListener* aListener) {
if (mDirectBufferListener.get()) {
return UNKNOWN_ERROR;
}
mDirectBufferListener = aListener;
return OK;
}
status_t GonkCameraSource::read(
MediaBuffer **buffer, const ReadOptions *options) {
CS_LOGV("read");
@@ -761,6 +784,15 @@ void GonkCameraSource::dataCallbackTimestamp(int64_t timestampUs,
if(prevRateLimit != rateLimit) {
mCameraHw->OnRateLimitPreview(rateLimit);
}
if (mDirectBufferListener.get()) {
MediaBuffer* mediaBuffer;
if (read(&mediaBuffer) == OK) {
mDirectBufferListener->BufferAvailable(mediaBuffer);
// read() calls MediaBuffer->add_ref() so it needs to be released here.
mediaBuffer->release();
}
}
}
bool GonkCameraSource::isMetaDataStoredInVideoBuffers() const {
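Note on buffer ownership in this new delivery path: read() takes a reference on the MediaBuffer before BufferAvailable() is invoked, and dataCallbackTimestamp() releases that reference as soon as the callback returns. A listener that needs to keep the frame beyond the callback therefore has to take its own reference first. A minimal sketch, for illustration only (MyListener and mPendingFrame are hypothetical names, not part of this patch):

status_t MyListener::BufferAvailable(MediaBuffer* aBuffer) {
  // The caller releases its reference right after we return, so take our
  // own reference now if the frame must outlive this callback.
  aBuffer->add_ref();
  mPendingFrame = aBuffer;  // the consumer must call release() when done with it
  return OK;
}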

View file

@@ -27,6 +27,10 @@
#include "GonkCameraHwMgr.h"
namespace mozilla {
class ICameraControl;
}
namespace android {
class IMemory;
@@ -39,6 +43,10 @@ public:
int32_t frameRate,
bool storeMetaDataInVideoBuffers = false);
static GonkCameraSource *Create(mozilla::ICameraControl* aControl,
Size videoSize,
int32_t frameRate);
virtual ~GonkCameraSource();
virtual status_t start(MetaData *params = NULL);
@@ -75,6 +83,24 @@ public:
virtual void signalBufferReturned(MediaBuffer* buffer);
/**
* Sends recording frames to the listener directly, on the same thread.
* Recording frames are a scarce resource and should not be propagated to
* other threads where possible; otherwise the camera HAL can stall waiting
* for buffers, causing frame-rate jitter.
*/
class DirectBufferListener : public RefBase {
public:
DirectBufferListener() {};
virtual status_t BufferAvailable(MediaBuffer* aBuffer) = 0;
protected:
virtual ~DirectBufferListener() {}
};
status_t AddDirectBufferListener(DirectBufferListener* aListener);
protected:
enum CameraFlags {
@@ -136,6 +162,7 @@ private:
bool mCollectStats;
bool mIsMetaDataStoredInVideoBuffers;
sp<GonkCameraHardware> mCameraHw;
sp<DirectBufferListener> mDirectBufferListener;
void releaseQueuedFrames();
void releaseOneRecordingFrame(const sp<IMemory>& frame);
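For reference, a minimal sketch of how a consumer can drive the new direct-buffer API, mirroring what MediaEngineGonkVideoSource does later in this patch. The class name, resolution, and frame rate below are illustrative, not part of the change:

class MyFrameListener : public android::GonkCameraSource::DirectBufferListener {
public:
  virtual android::status_t BufferAvailable(android::MediaBuffer* aBuffer) {
    // Called synchronously on the camera thread for each recording frame.
    return android::OK;
  }
};

// aControl is an existing mozilla::ICameraControl*.
android::Size size;
size.width = 1280;   // illustrative; real code queries CAMERA_PARAM_SUPPORTED_VIDEOSIZES
size.height = 720;
android::sp<android::GonkCameraSource> source =
    android::GonkCameraSource::Create(aControl, size, 30);
if (source->AddDirectBufferListener(new MyFrameListener()) != android::OK ||
    source->start(nullptr) != android::OK) {
  // handle failure
}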

View file

@@ -18,6 +18,7 @@ namespace mozilla {
using namespace mozilla::dom;
using namespace mozilla::gfx;
using namespace android;
#ifdef PR_LOGGING
extern PRLogModuleInfo* GetMediaManagerLog();
@@ -28,6 +29,29 @@ extern PRLogModuleInfo* GetMediaManagerLog();
#define LOGFRAME(msg)
#endif
class MediaBufferListener : public GonkCameraSource::DirectBufferListener {
public:
MediaBufferListener(MediaEngineGonkVideoSource* aMediaEngine)
: mMediaEngine(aMediaEngine)
{
}
status_t BufferAvailable(MediaBuffer* aBuffer)
{
nsresult rv = mMediaEngine->OnNewMediaBufferFrame(aBuffer);
if (NS_SUCCEEDED(rv)) {
return OK;
}
return UNKNOWN_ERROR;
}
~MediaBufferListener()
{
}
nsRefPtr<MediaEngineGonkVideoSource> mMediaEngine;
};
// We are subclassed from CameraControlListener, which implements a
// threadsafe reference-count for us.
NS_IMPL_QUERY_INTERFACE(MediaEngineGonkVideoSource, nsISupports)
@@ -164,6 +188,46 @@ MediaEngineGonkVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
return NS_ERROR_FAILURE;
}
if (NS_FAILED(InitDirectMediaBuffer())) {
return NS_ERROR_FAILURE;
}
return NS_OK;
}
nsresult
MediaEngineGonkVideoSource::InitDirectMediaBuffer()
{
// Check available buffer resolution.
nsTArray<ICameraControl::Size> videoSizes;
mCameraControl->Get(CAMERA_PARAM_SUPPORTED_VIDEOSIZES, videoSizes);
if (!videoSizes.Length()) {
return NS_ERROR_FAILURE;
}
// TODO: MediaEngine should use supported recording frame sizes as the size
// range in MediaTrackConstraintSet and find the best match.
// Here we use the first one as the default size (largest supported size).
android::Size videoSize;
videoSize.width = videoSizes[0].width;
videoSize.height = videoSizes[0].height;
LOG(("Intial size, width: %d, height: %d", videoSize.width, videoSize.height));
mCameraSource = GonkCameraSource::Create(mCameraControl,
videoSize,
MediaEngine::DEFAULT_VIDEO_FPS);
status_t rv;
rv = mCameraSource->AddDirectBufferListener(new MediaBufferListener(this));
if (rv != OK) {
return NS_ERROR_FAILURE;
}
rv = mCameraSource->start(nullptr);
if (rv != OK) {
return NS_ERROR_FAILURE;
}
return NS_OK;
}
@@ -345,6 +409,9 @@ void
MediaEngineGonkVideoSource::StopImpl() {
MOZ_ASSERT(NS_IsMainThread());
mCameraSource->stop();
mCameraSource = nullptr;
hal::UnregisterScreenConfigurationObserver(this);
mCameraControl->Stop();
}
@@ -576,16 +643,16 @@ MediaEngineGonkVideoSource::ConvertPixelFormatToFOURCC(int aFormat)
void
MediaEngineGonkVideoSource::RotateImage(layers::Image* aImage, uint32_t aWidth, uint32_t aHeight) {
layers::GrallocImage *nativeImage = static_cast<layers::GrallocImage*>(aImage);
android::sp<android::GraphicBuffer> graphicBuffer = nativeImage->GetGraphicBuffer();
sp<GraphicBuffer> graphicBuffer = nativeImage->GetGraphicBuffer();
void *pMem = nullptr;
uint32_t size = aWidth * aHeight * 3 / 2;
graphicBuffer->lock(android::GraphicBuffer::USAGE_SW_READ_MASK, &pMem);
graphicBuffer->lock(GraphicBuffer::USAGE_SW_READ_MASK, &pMem);
uint8_t* srcPtr = static_cast<uint8_t*>(pMem);
// Create a video frame and append it to the track.
nsRefPtr<layers::Image> image = mImageContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
layers::PlanarYCbCrImage* videoImage = static_cast<layers::PlanarYCbCrImage*>(image.get());
nsRefPtr<layers::Image> image = mImageContainer->CreateImage(ImageFormat::GONK_CAMERA_IMAGE);
GonkCameraImage* videoImage = static_cast<GonkCameraImage*>(image.get());
uint32_t dstWidth;
uint32_t dstHeight;
@@ -629,23 +696,8 @@ MediaEngineGonkVideoSource::RotateImage(layers::Image* aImage, uint32_t aWidth,
videoImage->SetDataNoCopy(data);
// implicitly releases last image
// Implicitly releases last preview image.
mImage = image.forget();
// Push the frame into the MSG with a minimal duration. This will likely
// mean we'll still get NotifyPull calls which will then return the same
// frame again with a longer duration. However, this means we won't
// fail to get the frame in and drop frames.
// XXX The timestamp for the frame should be base on the Capture time,
// not the MSG time, and MSG should never, ever block on a (realtime)
// video frame (or even really for streaming - audio yes, video probably no).
uint32_t len = mSources.Length();
for (uint32_t i = 0; i < len; i++) {
if (mSources[i]) {
AppendToTrack(mSources[i], mImage, mTrackID, 1); // shortest possible duration
}
}
}
bool
@@ -674,4 +726,40 @@ MediaEngineGonkVideoSource::OnNewPreviewFrame(layers::Image* aImage, uint32_t aW
return true; // return true because we're accepting the frame
}
nsresult
MediaEngineGonkVideoSource::OnNewMediaBufferFrame(MediaBuffer* aBuffer)
{
{
ReentrantMonitorAutoEnter sync(mCallbackMonitor);
if (mState == kStopped) {
return NS_OK;
}
}
MonitorAutoLock enter(mMonitor);
if (mImage) {
GonkCameraImage* cameraImage = static_cast<GonkCameraImage*>(mImage.get());
cameraImage->SetBuffer(aBuffer);
uint32_t len = mSources.Length();
for (uint32_t i = 0; i < len; i++) {
if (mSources[i]) {
// Duration is 1 here.
// Ideally this would use the camera timestamp, so the MSG would have
// enough sample duration without needing NotifyPull() anymore.
// Unfortunately, the gonk camera appears to use a different clock from
// the MSG, which makes the timing inaccurate (frames get queued in the
// MSG for longer and longer over time on devices such as Flame).
AppendToTrack(mSources[i], cameraImage, mTrackID, 1);
}
}
// Clear the MediaBuffer immediately; this prevents the MediaBuffer from
// being held on the MediaStreamGraph thread.
cameraImage->ClearBuffer();
}
return NS_OK;
}
} // namespace mozilla

View file

@@ -15,6 +15,11 @@
#include "mozilla/Hal.h"
#include "mozilla/ReentrantMonitor.h"
#include "mozilla/dom/File.h"
#include "GonkCameraSource.h"
namespace android {
class MOZ_EXPORT MediaBuffer;
}
namespace mozilla {
@@ -90,6 +95,12 @@ public:
// current screen orientation.
nsresult UpdatePhotoOrientation();
// Adds aBuffer to the current preview image and sends that image to the
// MediaStreamDirectListener via AppendToTrack(). Because a MediaBuffer is a
// limited resource, the image's MediaBuffer is cleared by calling
// GonkCameraImage::ClearBuffer() before this function returns.
nsresult OnNewMediaBufferFrame(android::MediaBuffer* aBuffer);
protected:
~MediaEngineGonkVideoSource()
{
@@ -100,12 +111,17 @@ protected:
void Shutdown();
void ChooseCapability(const VideoTrackConstraintsN& aConstraints,
const MediaEnginePrefs& aPrefs);
// Initialize the recording frame (MediaBuffer) callback and the Gonk camera.
// MediaBuffers are transferred to the MediaStreamGraph via AppendToTrack().
nsresult InitDirectMediaBuffer();
mozilla::ReentrantMonitor mCallbackMonitor; // Monitor for camera callback handling
// This is only modified on MainThread (AllocImpl and DeallocImpl)
nsRefPtr<ICameraControl> mCameraControl;
nsCOMPtr<nsIDOMFile> mLastCapture;
android::sp<android::GonkCameraSource> mCameraSource;
// These are protected by mMonitor in parent class
nsTArray<nsRefPtr<PhotoCallback>> mPhotoCallbacks;
int mRotation;

View file

@@ -1171,6 +1171,7 @@ void MediaPipelineTransmit::PipelineListener::ProcessVideoChunk(
last_img_ = serial;
ImageFormat format = img->GetFormat();
layers::PlanarYCbCrImage* yuvImage = img->AsPlanarYCbCrImage();
#ifdef WEBRTC_GONK
if (format == ImageFormat::GRALLOC_PLANAR_YCBCR) {
layers::GrallocImage *nativeImage = static_cast<layers::GrallocImage*>(img);
@@ -1187,21 +1188,17 @@ void MediaPipelineTransmit::PipelineListener::ProcessVideoChunk(
graphicBuffer->unlock();
} else
#endif
if (format == ImageFormat::PLANAR_YCBCR) {
// Cast away constness b/c some of the accessors are non-const
layers::PlanarYCbCrImage* yuv =
const_cast<layers::PlanarYCbCrImage *>(
static_cast<const layers::PlanarYCbCrImage *>(img));
if (yuvImage) {
// Big-time assumption here that this is all contiguous data coming
// from getUserMedia or other sources.
const layers::PlanarYCbCrData *data = yuv->GetData();
const layers::PlanarYCbCrData *data = yuvImage->GetData();
uint8_t *y = data->mYChannel;
uint8_t *cb = data->mCbChannel;
uint8_t *cr = data->mCrChannel;
uint32_t width = yuv->GetSize().width;
uint32_t height = yuv->GetSize().height;
uint32_t length = yuv->GetDataSize();
uint32_t width = yuvImage->GetSize().width;
uint32_t height = yuvImage->GetSize().height;
uint32_t length = yuvImage->GetDataSize();
// NOTE: length may be rounded up or include 'other' data (see
// YCbCrImageDataDeserializerBase::ComputeMinBufferSize())