Bug 811757: Allow the user to explicitly share devices between tabs r=anant

Randell Jesup 2012-12-31 18:12:12 -05:00
Parent fac46bef15
Commit 5cd17e7cda
8 changed files with 182 additions and 126 deletions

View file

@@ -28,6 +28,12 @@ enum MediaEngineState {
kReleased
};
// We only support 1 audio and 1 video track for now.
enum {
kVideoTrack = 1,
kAudioTrack = 2
};
class MediaEngine
{
public:
@@ -74,10 +80,14 @@ public:
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile) = 0;
/* Called when the stream wants more data */
virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime) = 0;
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,
StreamTime aDesiredTime,
TrackTicks &aLastEndTime) = 0;
/* Stop the device and release the corresponding MediaStream */
virtual nsresult Stop() = 0;
virtual nsresult Stop(SourceMediaStream *aSource, TrackID aID) = 0;
/* Return false if device is currently allocated or started */
bool IsAvailable() {
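
The interface change above is the heart of the patch: Stop() and NotifyPull() now name the SourceMediaStream and TrackID explicitly instead of relying on per-source member state, so a single engine source can feed several streams (tabs) at once. A minimal standalone sketch of that sharing pattern, with hypothetical names and std::vector standing in for nsTArray:

// Sketch only; not the Mozilla classes. Hardware starts with the first
// consumer and stops with the last, mirroring the mSources bookkeeping
// the later hunks introduce.
#include <algorithm>
#include <vector>

struct SourceMediaStream; // opaque consumer-side stream

class SharedDeviceSource {
public:
  void Start(SourceMediaStream* aStream) {
    if (mStreams.empty()) {
      StartHardware(); // first consumer powers the device on
    }
    mStreams.push_back(aStream);
  }

  void Stop(SourceMediaStream* aStream) {
    mStreams.erase(std::remove(mStreams.begin(), mStreams.end(), aStream),
                   mStreams.end());
    if (mStreams.empty()) {
      StopHardware(); // last consumer gone: release the device
    }
  }

private:
  void StartHardware() {}
  void StopHardware() {}
  std::vector<SourceMediaStream*> mStreams;
};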

View file

@@ -168,7 +168,7 @@ MediaEngineDefaultVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
}
nsresult
MediaEngineDefaultVideoSource::Stop()
MediaEngineDefaultVideoSource::Stop(SourceMediaStream *aSource, TrackID aID)
{
if (mState != kStarted) {
return NS_ERROR_FAILURE;
@@ -180,8 +180,8 @@ MediaEngineDefaultVideoSource::Stop()
mTimer->Cancel();
mTimer = NULL;
mSource->EndTrack(mTrackID);
mSource->Finish();
aSource->EndTrack(aID);
aSource->Finish();
mState = kStopped;
return NS_OK;
@@ -353,7 +353,7 @@ MediaEngineDefaultAudioSource::Start(SourceMediaStream* aStream, TrackID aID)
}
nsresult
MediaEngineDefaultAudioSource::Stop()
MediaEngineDefaultAudioSource::Stop(SourceMediaStream *aSource, TrackID aID)
{
if (mState != kStarted) {
return NS_ERROR_FAILURE;
@@ -365,8 +365,8 @@ MediaEngineDefaultAudioSource::Stop()
mTimer->Cancel();
mTimer = NULL;
mSource->EndTrack(mTrackID);
mSource->Finish();
aSource->EndTrack(aID);
aSource->Finish();
mState = kStopped;
return NS_OK;

View file

@@ -42,9 +42,14 @@ public:
virtual nsresult Allocate();
virtual nsresult Deallocate();
virtual nsresult Start(SourceMediaStream*, TrackID);
virtual nsresult Stop();
virtual nsresult Stop(SourceMediaStream*, TrackID);
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime);
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,
StreamTime aDesiredTime,
TrackTicks &aLastEndTime) {}
NS_DECL_ISUPPORTS
NS_DECL_NSITIMERCALLBACK
@@ -79,9 +84,14 @@ public:
virtual nsresult Allocate();
virtual nsresult Deallocate();
virtual nsresult Start(SourceMediaStream*, TrackID);
virtual nsresult Stop();
virtual nsresult Stop(SourceMediaStream*, TrackID);
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime);
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,
StreamTime aDesiredTime,
TrackTicks &aLastEndTime) {}
NS_DECL_ISUPPORTS
NS_DECL_NSITIMERCALLBACK

View file

@@ -88,9 +88,13 @@ public:
virtual nsresult Allocate();
virtual nsresult Deallocate();
virtual nsresult Start(SourceMediaStream*, TrackID);
virtual nsresult Stop();
virtual nsresult Stop(SourceMediaStream*, TrackID);
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime);
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,
StreamTime aDesiredTime,
TrackTicks &aLastEndTime);
NS_DECL_ISUPPORTS
@@ -136,7 +140,7 @@ private:
TrackTicks mLastEndTime;
mozilla::ReentrantMonitor mMonitor; // Monitor for processing WebRTC frames.
SourceMediaStream* mSource;
nsTArray<SourceMediaStream *> mSources; // When this goes empty, we shut down HW
int mFps; // Track rate (30 fps by default)
int mMinFps; // Min rate we want to accept
@@ -184,9 +188,13 @@ public:
virtual nsresult Allocate();
virtual nsresult Deallocate();
virtual nsresult Start(SourceMediaStream*, TrackID);
virtual nsresult Stop();
virtual nsresult Stop(SourceMediaStream*, TrackID);
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime);
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,
StreamTime aDesiredTime,
TrackTicks &aLastEndTime);
// VoEMediaProcess.
void Process(const int channel, const webrtc::ProcessingTypes type,
@@ -208,6 +216,7 @@ private:
webrtc::VoENetwork* mVoENetwork;
mozilla::ReentrantMonitor mMonitor;
nsTArray<SourceMediaStream *> mSources; // When this goes empty, we shut down HW
int mCapIndex;
int mChannel;
@@ -217,7 +226,6 @@ private:
nsString mDeviceName;
nsString mDeviceUUID;
SourceMediaStream* mSource;
NullTransport *mNullTransport;
};

View file

@@ -49,52 +49,60 @@ MediaEngineWebRTCAudioSource::GetUUID(nsAString& aUUID)
nsresult
MediaEngineWebRTCAudioSource::Allocate()
{
if (mState != kReleased) {
return NS_ERROR_FAILURE;
if (mState == kReleased && mInitDone) {
webrtc::VoEHardware* ptrVoEHw = webrtc::VoEHardware::GetInterface(mVoiceEngine);
int res = ptrVoEHw->SetRecordingDevice(mCapIndex);
ptrVoEHw->Release();
if (res) {
return NS_ERROR_FAILURE;
}
mState = kAllocated;
LOG(("Audio device %d allocated", mCapIndex));
} else if (mSources.IsEmpty()) {
LOG(("Audio device %d reallocated", mCapIndex));
} else {
LOG(("Audio device %d allocated shared", mCapIndex));
}
webrtc::VoEHardware* ptrVoEHw = webrtc::VoEHardware::GetInterface(mVoiceEngine);
int res = ptrVoEHw->SetRecordingDevice(mCapIndex);
ptrVoEHw->Release();
if (res) {
return NS_ERROR_FAILURE;
}
LOG(("Audio device %d allocated", mCapIndex));
mState = kAllocated;
return NS_OK;
}
nsresult
MediaEngineWebRTCAudioSource::Deallocate()
{
if (mState != kStopped && mState != kAllocated) {
return NS_ERROR_FAILURE;
}
if (mSources.IsEmpty()) {
if (mState != kStopped && mState != kAllocated) {
return NS_ERROR_FAILURE;
}
mState = kReleased;
mState = kReleased;
LOG(("Audio device %d deallocated", mCapIndex));
} else {
LOG(("Audio device %d deallocated but still in use", mCapIndex));
}
return NS_OK;
}
nsresult
MediaEngineWebRTCAudioSource::Start(SourceMediaStream* aStream, TrackID aID)
{
if (!mInitDone || mState != kAllocated) {
return NS_ERROR_FAILURE;
}
if (!aStream) {
if (!mInitDone || !aStream) {
return NS_ERROR_FAILURE;
}
mSource = aStream;
mSources.AppendElement(aStream);
AudioSegment* segment = new AudioSegment();
segment->Init(CHANNELS);
mSource->AddTrack(aID, SAMPLE_FREQUENCY, 0, segment);
mSource->AdvanceKnownTracksTime(STREAM_TIME_MAX);
aStream->AddTrack(aID, SAMPLE_FREQUENCY, 0, segment);
aStream->AdvanceKnownTracksTime(STREAM_TIME_MAX);
LOG(("Initial audio"));
mTrackID = aID;
if (mState == kStarted) {
return NS_OK;
}
mState = kStarted;
if (mVoEBase->StartReceive(mChannel)) {
return NS_ERROR_FAILURE;
}
@@ -105,13 +113,19 @@ MediaEngineWebRTCAudioSource::Start(SourceMediaStream* aStream, TrackID aID)
// Attach external media processor, so this::Process will be called.
mVoERender->RegisterExternalMediaProcessing(mChannel, webrtc::kRecordingPerChannel, *this);
mState = kStarted;
return NS_OK;
}
nsresult
MediaEngineWebRTCAudioSource::Stop()
MediaEngineWebRTCAudioSource::Stop(SourceMediaStream *aSource, TrackID aID)
{
if (!mSources.RemoveElement(aSource)) {
// Already stopped - this is allowed
return NS_OK;
}
if (!mSources.IsEmpty()) {
return NS_OK;
}
if (mState != kStarted) {
return NS_ERROR_FAILURE;
}
@@ -119,6 +133,12 @@ MediaEngineWebRTCAudioSource::Stop()
return NS_ERROR_FAILURE;
}
{
ReentrantMonitorAutoEnter enter(mMonitor);
mState = kStopped;
aSource->EndTrack(aID);
}
mVoERender->DeRegisterExternalMediaProcessing(mChannel, webrtc::kRecordingPerChannel);
if (mVoEBase->StopSend(mChannel)) {
@@ -127,27 +147,22 @@ MediaEngineWebRTCAudioSource::Stop()
if (mVoEBase->StopReceive(mChannel)) {
return NS_ERROR_FAILURE;
}
{
ReentrantMonitorAutoEnter enter(mMonitor);
mState = kStopped;
mSource->EndTrack(mTrackID);
}
return NS_OK;
}
void
MediaEngineWebRTCAudioSource::NotifyPull(MediaStreamGraph* aGraph,
StreamTime aDesiredTime)
SourceMediaStream *aSource,
TrackID aID,
StreamTime aDesiredTime,
TrackTicks &aLastEndTime)
{
// Ignore - we push audio data
#ifdef DEBUG
static TrackTicks mLastEndTime = 0;
TrackTicks target = TimeToTicksRoundUp(SAMPLE_FREQUENCY, aDesiredTime);
TrackTicks delta = target - mLastEndTime;
LOG(("Audio:NotifyPull: target %lu, delta %lu",(uint32_t) target, (uint32_t) delta));
mLastEndTime = target;
TrackTicks delta = target - aLastEndTime;
LOG(("Audio:NotifyPull: target %lu, delta %lu",(uint64_t) target, (uint64_t) delta));
aLastEndTime = target;
#endif
}
@@ -235,10 +250,13 @@ MediaEngineWebRTCAudioSource::Shutdown()
}
if (mState == kStarted) {
Stop();
while (!mSources.IsEmpty()) {
Stop(mSources[0], kAudioTrack); // XXX change to support multiple tracks
}
MOZ_ASSERT(mState == kStopped);
}
if (mState == kAllocated) {
if (mState == kAllocated || mState == kStopped) {
Deallocate();
}
@@ -269,17 +287,22 @@ MediaEngineWebRTCAudioSource::Process(const int channel,
if (mState != kStarted)
return;
nsRefPtr<SharedBuffer> buffer = SharedBuffer::Create(length * sizeof(sample));
uint32_t len = mSources.Length();
for (uint32_t i = 0; i < len; i++) {
nsRefPtr<SharedBuffer> buffer = SharedBuffer::Create(length * sizeof(sample));
sample* dest = static_cast<sample*>(buffer->Data());
memcpy(dest, audio10ms, length * sizeof(sample));
sample* dest = static_cast<sample*>(buffer->Data());
memcpy(dest, audio10ms, length * sizeof(sample));
AudioSegment segment;
segment.Init(CHANNELS);
segment.AppendFrames(
buffer.forget(), length, 0, length, AUDIO_FORMAT_S16
);
mSource->AppendToTrack(mTrackID, &segment);
AudioSegment segment;
segment.Init(CHANNELS);
segment.AppendFrames(buffer.forget(), length, 0, length, AUDIO_FORMAT_S16);
SourceMediaStream *source = mSources[i];
if (source) {
source->AppendToTrack(mTrackID, &segment);
}
}
return;
}
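
The rewritten Process() above fans each captured 10 ms chunk out to every attached stream, allocating a fresh SharedBuffer per stream because AppendFrames() takes ownership of it via buffer.forget(). A simplified standalone sketch of that fan-out, with stand-in types (Consumer for SourceMediaStream):

// Sketch only; each consumer gets its own copy of the chunk because the
// real AudioSegment consumes the buffer it is handed.
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <vector>

using Sample = int16_t;

struct Consumer {
  std::vector<Sample> track; // stands in for the stream's audio track
};

void FanOut(const Sample* audio10ms, std::size_t length,
            const std::vector<Consumer*>& consumers) {
  if (length == 0) {
    return; // nothing captured this interval
  }
  for (Consumer* c : consumers) {
    std::vector<Sample> copy(length); // per-consumer buffer
    std::memcpy(copy.data(), audio10ms, length * sizeof(Sample));
    c->track.insert(c->track.end(), copy.begin(), copy.end());
  }
}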

View file

@@ -97,7 +97,10 @@ MediaEngineWebRTCVideoSource::DeliverFrame(
// this means that no *real* frame can be inserted during this period.
void
MediaEngineWebRTCVideoSource::NotifyPull(MediaStreamGraph* aGraph,
StreamTime aDesiredTime)
SourceMediaStream *aSource,
TrackID aID,
StreamTime aDesiredTime,
TrackTicks &aLastEndTime)
{
VideoSegment segment;
@@ -108,14 +111,14 @@ MediaEngineWebRTCVideoSource::NotifyPull(MediaStreamGraph* aGraph,
// Note: we're not giving up mImage here
nsRefPtr<layers::Image> image = mImage;
TrackTicks target = TimeToTicksRoundUp(USECS_PER_S, aDesiredTime);
TrackTicks delta = target - mLastEndTime;
TrackTicks delta = target - aLastEndTime;
#ifdef LOG_ALL_FRAMES
LOG(("NotifyPull, target = %lu, delta = %lu", (uint64_t) target, (uint64_t) delta));
#endif
// NULL images are allowed
segment.AppendFrame(image ? image.forget() : nullptr, delta, gfxIntSize(mWidth, mHeight));
mSource->AppendToTrack(mTrackID, &(segment));
mLastEndTime = target;
aSource->AppendToTrack(aID, &(segment));
aLastEndTime = target;
}
void
@@ -189,32 +192,40 @@ MediaEngineWebRTCVideoSource::GetUUID(nsAString& aUUID)
nsresult
MediaEngineWebRTCVideoSource::Allocate()
{
if (mState != kReleased) {
return NS_ERROR_FAILURE;
}
if (!mCapabilityChosen) {
// XXX these should come from constraints
ChooseCapability(mWidth, mHeight, mMinFps);
}
if (mViECapture->AllocateCaptureDevice(mUniqueId, KMaxUniqueIdLength, mCaptureIndex)) {
return NS_ERROR_FAILURE;
if (mState == kReleased && mInitDone) {
if (mViECapture->AllocateCaptureDevice(mUniqueId, KMaxUniqueIdLength, mCaptureIndex)) {
return NS_ERROR_FAILURE;
}
mState = kAllocated;
LOG(("Video device %d allocated", mCaptureIndex));
} else if (mSources.IsEmpty()) {
LOG(("Video device %d reallocated", mCaptureIndex));
} else {
LOG(("Video device %d allocated shared", mCaptureIndex));
}
mState = kAllocated;
return NS_OK;
}
nsresult
MediaEngineWebRTCVideoSource::Deallocate()
{
if (mState != kStopped && mState != kAllocated) {
return NS_ERROR_FAILURE;
}
if (mSources.IsEmpty()) {
if (mState != kStopped && mState != kAllocated) {
return NS_ERROR_FAILURE;
}
mViECapture->ReleaseCaptureDevice(mCaptureIndex);
mState = kReleased;
mViECapture->ReleaseCaptureDevice(mCaptureIndex);
mState = kReleased;
LOG(("Video device %d deallocated", mCaptureIndex));
} else {
LOG(("Video device %d deallocated but still in use", mCaptureIndex));
}
return NS_OK;
}
@@ -231,28 +242,22 @@ nsresult
MediaEngineWebRTCVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
{
int error = 0;
if (!mInitDone || mState != kAllocated) {
if (!mInitDone || !aStream) {
return NS_ERROR_FAILURE;
}
if (!aStream) {
return NS_ERROR_FAILURE;
}
mSources.AppendElement(aStream);
aStream->AddTrack(aID, USECS_PER_S, 0, new VideoSegment());
aStream->AdvanceKnownTracksTime(STREAM_TIME_MAX);
if (mState == kStarted) {
return NS_OK;
}
mSource = aStream;
mTrackID = aID;
mState = kStarted;
mImageContainer = layers::LayerManager::CreateImageContainer();
mSource->AddTrack(aID, USECS_PER_S, 0, new VideoSegment());
mSource->AdvanceKnownTracksTime(STREAM_TIME_MAX);
mLastEndTime = 0;
mState = kStarted;
error = mViERender->AddRenderer(mCaptureIndex, webrtc::kVideoI420, (webrtc::ExternalRenderer*)this);
if (error == -1) {
return NS_ERROR_FAILURE;
@@ -271,8 +276,16 @@ MediaEngineWebRTCVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
}
nsresult
MediaEngineWebRTCVideoSource::Stop()
MediaEngineWebRTCVideoSource::Stop(SourceMediaStream *aSource, TrackID aID)
{
if (!mSources.RemoveElement(aSource)) {
// Already stopped - this is allowed
return NS_OK;
}
if (!mSources.IsEmpty()) {
return NS_OK;
}
if (mState != kStarted) {
return NS_ERROR_FAILURE;
}
@@ -280,7 +293,7 @@ MediaEngineWebRTCVideoSource::Stop()
{
ReentrantMonitorAutoEnter enter(mMonitor);
mState = kStopped;
mSource->EndTrack(mTrackID);
aSource->EndTrack(aID);
}
mViERender->StopRender(mCaptureIndex);
@@ -423,22 +436,19 @@ MediaEngineWebRTCVideoSource::Init()
void
MediaEngineWebRTCVideoSource::Shutdown()
{
bool continueShutdown = false;
if (!mInitDone) {
return;
}
if (mState == kStarted) {
mViERender->StopRender(mCaptureIndex);
mViERender->RemoveRenderer(mCaptureIndex);
continueShutdown = true;
while (!mSources.IsEmpty()) {
Stop(mSources[0], kVideoTrack); // XXX change to support multiple tracks
}
MOZ_ASSERT(mState == kStopped);
}
if (mState == kAllocated || continueShutdown) {
mViECapture->StopCapture(mCaptureIndex);
mViECapture->ReleaseCaptureDevice(mCaptureIndex);
continueShutdown = false;
if (mState == kAllocated || mState == kStopped) {
Deallocate();
}
mViECapture->Release();

View file

@@ -670,22 +670,18 @@ public:
new nsTArray<nsCOMPtr<nsIMediaDevice> >;
/**
* We only display available devices in the UI for now. We can easily
* change this later, when we implement a more sophisticated UI that
* lets the user revoke a device currently held by another tab (or
* we decide to provide a stream from a device already allocated).
* We're allowing multiple tabs to access the same camera for parity
* with Chrome. See bug 811757 for some of the issues surrounding
* this decision. To disallow, we'd filter by IsAvailable() as we used
* to.
*/
for (i = 0; i < videoCount; i++) {
MediaEngineVideoSource *vSource = videoSources[i];
if (vSource->IsAvailable()) {
devices->AppendElement(new MediaDevice(vSource));
}
devices->AppendElement(new MediaDevice(vSource));
}
for (i = 0; i < audioCount; i++) {
MediaEngineAudioSource *aSource = audioSources[i];
if (aSource->IsAvailable()) {
devices->AppendElement(new MediaDevice(aSource));
}
devices->AppendElement(new MediaDevice(aSource));
}
NS_DispatchToMainThread(new DeviceSuccessCallbackRunnable(

View file

@@ -27,12 +27,6 @@ extern PRLogModuleInfo* GetMediaManagerLog();
#define MM_LOG(msg)
#endif
// We only support 1 audio and 1 video track for now.
enum {
kVideoTrack = 1,
kAudioTrack = 2
};
class GetUserMediaNotificationEvent: public nsRunnable
{
public:
@@ -155,11 +149,11 @@ public:
{
NS_ASSERTION(!NS_IsMainThread(), "Never call on main thread");
if (mAudioSource) {
mAudioSource->Stop();
mAudioSource->Stop(mSourceStream, kAudioTrack);
mAudioSource->Deallocate();
}
if (mVideoSource) {
mVideoSource->Stop();
mVideoSource->Stop(mSourceStream, kVideoTrack);
mVideoSource->Deallocate();
}
// Do this after stopping all tracks with EndTrack()
@@ -202,7 +196,10 @@ public:
: mMediaThread(aThread)
, mAudioSource(aAudioSource)
, mVideoSource(aVideoSource)
, mStream(aStream) {}
, mStream(aStream)
, mSourceStream(aStream->GetStream()->AsSourceStream())
, mLastEndTimeAudio(0)
, mLastEndTimeVideo(0) {}
~GetUserMediaCallbackMediaStreamListener()
{
@@ -231,8 +228,7 @@ public:
// b) if on MediaStreamGraph thread, dispatch a runnable to MainThread
// to call Invalidate() (with a strong ref to this listener)
runnable = new MediaOperationRunnable(MEDIA_STOP,
mStream->GetStream()->AsSourceStream(),
mAudioSource, mVideoSource);
mSourceStream, mAudioSource, mVideoSource);
mMediaThread->Dispatch(runnable, NS_DISPATCH_NORMAL);
return;
@@ -253,10 +249,10 @@ public:
// Currently audio sources ignore NotifyPull, but they could
// watch it especially for fake audio.
if (mAudioSource) {
mAudioSource->NotifyPull(aGraph, aDesiredTime);
mAudioSource->NotifyPull(aGraph, mSourceStream, kAudioTrack, aDesiredTime, mLastEndTimeAudio);
}
if (mVideoSource) {
mVideoSource->NotifyPull(aGraph, aDesiredTime);
mVideoSource->NotifyPull(aGraph, mSourceStream, kVideoTrack, aDesiredTime, mLastEndTimeVideo);
}
}
@@ -272,6 +268,9 @@ private:
nsRefPtr<MediaEngineSource> mAudioSource;
nsRefPtr<MediaEngineSource> mVideoSource;
nsRefPtr<nsDOMMediaStream> mStream;
SourceMediaStream *mSourceStream; // mStream controls ownership
TrackTicks mLastEndTimeAudio;
TrackTicks mLastEndTimeVideo;
};
typedef nsTArray<nsRefPtr<GetUserMediaCallbackMediaStreamListener> > StreamListeners;
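
With the listener now owning mSourceStream and the per-track cursors, the NotifyPull bookkeeping that previously lived in the source (including the debug-only function-local static in the audio case) becomes per-consumer state. A standalone sketch of the new calling convention, with hypothetical stand-ins for the Mozilla types:

// Sketch only: the caller owns the cursor, so one source can serve many
// streams without them stomping each other's progress.
#include <cstdint>

using TrackTicks = int64_t;
using StreamTime = int64_t;
struct SourceMediaStream;

struct EngineSource {
  void NotifyPull(SourceMediaStream* aSource, int aTrackID,
                  StreamTime aDesiredTime, TrackTicks& aLastEndTime) {
    TrackTicks target = ToTicks(aDesiredTime);
    TrackTicks delta = target - aLastEndTime; // data owed to this stream only
    Append(aSource, aTrackID, delta);
    aLastEndTime = target;                    // advance this consumer's cursor
  }
  TrackTicks ToTicks(StreamTime aTime) { return aTime; }  // placeholder
  void Append(SourceMediaStream*, int, TrackTicks) {}     // placeholder
};

// Each listener keeps its own cursors, as in the header above.
struct Listener {
  SourceMediaStream* mSourceStream = nullptr;
  TrackTicks mLastEndTimeAudio = 0;
  TrackTicks mLastEndTimeVideo = 0;
};

Had the cursor stayed a single member (or static) inside the source, two tabs pulling from the same camera would overwrite each other's last-end time, which is exactly what this commit's signature change avoids.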