Bug 801843: Change how video frames are inserted into getUserMedia streams to remove blocking r=roc,anant

This commit is contained in:
Randell Jesup 2012-10-17 05:46:40 -04:00
Родитель de3240cb47
Коммит abfb94ebc4
7 изменённых файлов: 96 добавлений и 8 удалений

Просмотреть файл

@ -73,6 +73,9 @@ public:
*/
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile) = 0;
/* Called when the stream wants more data */
virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime) = 0;
/* Stop the device and release the corresponding MediaStream */
virtual nsresult Stop() = 0;

Просмотреть файл

@ -203,10 +203,19 @@ MediaEngineDefaultVideoSource::Notify(nsITimer* aTimer)
return NS_OK;
}
NS_IMPL_THREADSAFE_ISUPPORTS1(MediaEngineDefaultAudioSource, nsITimerCallback)
void
MediaEngineDefaultVideoSource::NotifyPull(MediaStreamGraph* aGraph,
StreamTime aDesiredTime)
{
// Ignore - we push video data
}
/**
* Default audio source.
*/
NS_IMPL_THREADSAFE_ISUPPORTS1(MediaEngineDefaultAudioSource, nsITimerCallback)
MediaEngineDefaultAudioSource::MediaEngineDefaultAudioSource()
: mTimer(nullptr)
{
@ -216,6 +225,13 @@ MediaEngineDefaultAudioSource::MediaEngineDefaultAudioSource()
MediaEngineDefaultAudioSource::~MediaEngineDefaultAudioSource()
{}
void
MediaEngineDefaultAudioSource::NotifyPull(MediaStreamGraph* aGraph,
StreamTime aDesiredTime)
{
// Ignore - we push audio data
}
void
MediaEngineDefaultAudioSource::GetName(nsAString& aName)
{

Просмотреть файл

@ -45,6 +45,7 @@ public:
virtual nsresult Start(SourceMediaStream*, TrackID);
virtual nsresult Stop();
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime);
NS_DECL_ISUPPORTS
NS_DECL_NSITIMERCALLBACK
@ -79,6 +80,7 @@ public:
virtual nsresult Start(SourceMediaStream*, TrackID);
virtual nsresult Stop();
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime);
NS_DECL_ISUPPORTS
NS_DECL_NSITIMERCALLBACK

Просмотреть файл

@ -53,7 +53,7 @@ class MediaEngineWebRTCVideoSource : public MediaEngineVideoSource,
public nsRunnable
{
public:
static const int DEFAULT_VIDEO_FPS = 30;
static const int DEFAULT_VIDEO_FPS = 60;
static const int DEFAULT_MIN_VIDEO_FPS = 10;
// ViEExternalRenderer.
@ -67,6 +67,7 @@ public:
, mCapabilityChosen(false)
, mWidth(640)
, mHeight(480)
, mLastEndTime(0)
, mMonitor("WebRTCCamera.Monitor")
, mFps(DEFAULT_VIDEO_FPS)
, mMinFps(aMinFps)
@ -86,6 +87,7 @@ public:
virtual nsresult Start(SourceMediaStream*, TrackID);
virtual nsresult Stop();
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime);
NS_DECL_ISUPPORTS
@ -128,6 +130,7 @@ private:
bool mCapabilityChosen;
int mWidth, mHeight;
TrackID mTrackID;
TrackTicks mLastEndTime;
mozilla::ReentrantMonitor mMonitor; // Monitor for processing WebRTC frames.
SourceMediaStream* mSource;
@ -138,6 +141,7 @@ private:
bool mInSnapshotMode;
nsString* mSnapshotPath;
nsRefPtr<layers::Image> mImage;
nsRefPtr<layers::ImageContainer> mImageContainer;
PRLock* mSnapshotLock;
@ -177,6 +181,7 @@ public:
virtual nsresult Start(SourceMediaStream*, TrackID);
virtual nsresult Stop();
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime);
// VoEMediaProcess.
void Process(const int channel, const webrtc::ProcessingTypes type,

Просмотреть файл

@ -89,6 +89,7 @@ MediaEngineWebRTCAudioSource::Start(SourceMediaStream* aStream, TrackID aID)
segment->Init(CHANNELS);
mSource->AddTrack(aID, SAMPLE_FREQUENCY, 0, segment);
mSource->AdvanceKnownTracksTime(STREAM_TIME_MAX);
LOG(("Initial audio"));
mTrackID = aID;
if (mVoEBase->StartReceive(mChannel)) {
@ -128,6 +129,13 @@ MediaEngineWebRTCAudioSource::Stop()
return NS_OK;
}
void
MediaEngineWebRTCAudioSource::NotifyPull(MediaStreamGraph* aGraph,
StreamTime aDesiredTime)
{
// Ignore - we push audio data
}
nsresult
MediaEngineWebRTCAudioSource::Snapshot(uint32_t aDuration, nsIDOMFile** aFile)
{

Просмотреть файл

@ -36,8 +36,6 @@ int
MediaEngineWebRTCVideoSource::DeliverFrame(
unsigned char* buffer, int size, uint32_t time_stamp, int64_t render_time)
{
ReentrantMonitorAutoEnter enter(mMonitor);
if (mInSnapshotMode) {
// Set the condition variable to false and notify Snapshot().
PR_Lock(mSnapshotLock);
@ -55,6 +53,7 @@ MediaEngineWebRTCVideoSource::DeliverFrame(
// Create a video frame and append it to the track.
ImageFormat format = PLANAR_YCBCR;
nsRefPtr<layers::Image> image = mImageContainer->CreateImage(&format, 1);
layers::PlanarYCbCrImage* videoImage = static_cast<layers::PlanarYCbCrImage*>(image.get());
@ -78,12 +77,48 @@ MediaEngineWebRTCVideoSource::DeliverFrame(
videoImage->SetData(data);
VideoSegment segment;
segment.AppendFrame(image.forget(), 1, gfxIntSize(mWidth, mHeight));
mSource->AppendToTrack(mTrackID, &(segment));
#ifdef LOG_ALL_FRAMES
static uint32_t frame_num = 0;
LOG(("frame %d; timestamp %u, render_time %lu", frame_num++, time_stamp, render_time));
#endif
// we don't touch anything in 'this' until here (except for snapshot,
// which has its own lock)
ReentrantMonitorAutoEnter enter(mMonitor);
// implicitly releases last image
mImage = image.forget();
return 0;
}
// Called if the graph thinks it's running out of buffered video; repeat
// the last frame for whatever minimum period it thinks it needs.  Note that
// this means that no *real* frame can be inserted during this period.
void
MediaEngineWebRTCVideoSource::NotifyPull(MediaStreamGraph* aGraph,
                                         StreamTime aDesiredTime)
{
  // Declared before taking the monitor so the segment (and any image it
  // holds) is destroyed after the monitor is released.
  VideoSegment segment;

  ReentrantMonitorAutoEnter enter(mMonitor);
  if (mState != kStarted)
    return;

  // Take an extra reference: we hand the reference to the segment via
  // forget() below, but DeliverFrame() must keep ownership of mImage.
  nsRefPtr<layers::Image> image = mImage;
  TrackTicks target = TimeToTicksRoundUp(USECS_PER_S, aDesiredTime);
  TrackTicks delta = target - mLastEndTime;
#ifdef LOG_ALL_FRAMES
  LOG(("NotifyPull, target = %lu, delta = %lu", (uint64_t) target, (uint64_t) delta));
#endif

  // Only append when the graph actually wants data past what we've already
  // supplied; a zero or negative delta would append a useless (or invalid)
  // non-positive-duration frame and move mLastEndTime backwards.
  if (delta > 0) {
    // NULL images are allowed while we wait for the first real frame
    segment.AppendFrame(image ? image.forget() : nullptr, delta,
                        gfxIntSize(mWidth, mHeight));
    mSource->AppendToTrack(mTrackID, &(segment));
    mLastEndTime = target;
  }
}
void
MediaEngineWebRTCVideoSource::ChooseCapability(uint32_t aWidth, uint32_t aHeight, uint32_t aMinFPS)
{
@ -213,8 +248,10 @@ MediaEngineWebRTCVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
mTrackID = aID;
mImageContainer = layers::LayerManager::CreateImageContainer();
mSource->AddTrack(aID, mFps, 0, new VideoSegment());
mSource->AddTrack(aID, USECS_PER_S, 0, new VideoSegment());
mSource->AdvanceKnownTracksTime(STREAM_TIME_MAX);
mLastEndTime = 0;
error = mViERender->AddRenderer(mCaptureIndex, webrtc::kVideoI420, (webrtc::ExternalRenderer*)this);
if (error == -1) {

Просмотреть файл

@ -110,6 +110,8 @@ public:
nsresult rv;
SourceMediaStream* stream = mStream->GetStream()->AsSourceStream();
stream->SetPullEnabled(true);
if (mAudioSource) {
rv = mAudioSource->Start(stream, kAudioTrack);
if (NS_FAILED(rv)) {
@ -122,6 +124,7 @@ public:
MM_LOG(("Starting video failed, rv=%d",rv));
}
}
MM_LOG(("started all sources"));
nsCOMPtr<GetUserMediaNotificationEvent> event =
new GetUserMediaNotificationEvent(GetUserMediaNotificationEvent::STARTING);
@ -135,6 +138,20 @@ public:
return;
}
// Forward the graph's pull notification to whichever sources exist.
void
NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime)
{
  // Audio sources presently treat NotifyPull as a no-op, but we forward
  // it regardless so that (e.g.) fake audio could be driven from it.
  if (mAudioSource) {
    mAudioSource->NotifyPull(aGraph, aDesiredTime);
  }
  if (mVideoSource) {
    mVideoSource->NotifyPull(aGraph, aDesiredTime);
  }
}
private:
nsRefPtr<MediaEngineSource> mAudioSource;
nsRefPtr<MediaEngineSource> mVideoSource;