Refactor API to allow separate preview and record steps

This commit is contained in:
Anant Narayanan 2011-03-28 15:09:06 -07:00
Родитель fdd00a6e6c
Коммит f2a6d84f03
10 изменённых файлов: 286 добавлений и 128 удалений

Просмотреть файл

@ -21,7 +21,7 @@ endif
endif
so_files=components/libmediarecorder.$(so)
xpt_files=components/IMediaRecorder.xpt
xpt_files=components/IMediaDevice.xpt
xpi_name=rainbow-$(VERSION)-dev.xpi
xpi_files=chrome.manifest install.rdf content/ $(so_files) $(xpt_files)

Просмотреть файл

@ -2,7 +2,7 @@ resource rainbow ./
content rainbow content/
overlay chrome://browser/content/browser.xul chrome://rainbow/content/browser.xul
interfaces components/IMediaRecorder.xpt
interfaces components/IMediaDevice.xpt
binary-component components/libmediarecorder.dylib ABI=Darwin_x86-gcc3
binary-component components/libmediarecorder.dll ABI=WINNT_x86-msvc
binary-component components/libmediarecorder.so ABI=Linux_x86-gcc3

Просмотреть файл

@ -46,10 +46,14 @@ interface nsIMediaStateObserver : nsISupports
};
[scriptable, uuid(c467b1f4-551c-4e2f-a6ba-cb7d792d1452)]
interface IMediaRecorder : nsISupports
interface IMediaDevice : nsISupports
{
void recordToFile(in nsIPropertyBag2 prop,
void beginSession(in nsIPropertyBag2 prop,
in nsIDOMCanvasRenderingContext2D ctx,
in nsILocalFile file, in nsIMediaStateObserver obs);
void stop();
in nsIMediaStateObserver obs);
void beginRecord(in nsILocalFile file);
void pauseRecord();
void resumeRecord();
void endRecord();
void endSession();
};

Просмотреть файл

@ -53,7 +53,7 @@ target = libmediarecorder
so_target = $(target:=.$(so))
# source and path configurations
idl = IMediaRecorder.idl
idl = IMediaDevice.idl
cpp_sources = MediaRecorder.cpp MediaModule.cpp Convert.cpp \
VideoSource.cpp $(os)/VideoSource$(teh).cpp \
AudioSource.cpp $(os)/AudioSource$(teh).cpp \

Просмотреть файл

@ -38,9 +38,9 @@
#include "assert.h"
#define MICROSECONDS 1000000
#define TOLERANCE 0.010000
#define TOLERANCE 0.100000
NS_IMPL_ISUPPORTS1(MediaRecorder, IMediaRecorder)
NS_IMPL_ISUPPORTS1(MediaRecorder, IMediaDevice)
MediaRecorder *MediaRecorder::gMediaRecordingService = nsnull;
MediaRecorder *
@ -184,7 +184,7 @@ MediaRecorder::GetVideoPacket(PRInt32 *len, PRFloat64 *times)
rv = vState->vPipeIn->Read((char *)times, sizeof(PRFloat64), &rd);
rv = vState->vPipeIn->Read((char *)len, sizeof(PRUint32), &rd);
fprintf(stderr, "Got %d video packets at %f\n", *len / vState->backend->GetFrameSize(), *times);
//fprintf(stderr, "Got %d video packets at %f\n", *len / vState->backend->GetFrameSize(), *times);
v_frame = (PRUint8 *)PR_Calloc(*len, sizeof(PRUint8));
do vState->vPipeIn->Available(&rd);
@ -281,21 +281,16 @@ MediaRecorder::Encode()
* run of the loop? Possible answer: No, because the timing might go
* awry, we are better off processing timestamps per frame of video.
*/
nsresult rv;
PRUint32 rd;
int v_fps = FPS_N / FPS_D;
int a_frame_num = FRAMES_BUFFER;
if (v_rec) {
v_fps = vState->backend->GetFPSN() / vState->backend->GetFPSD();
a_frame_num = params->rate/(v_fps);
}
//int v_frame_size = vState->backend->GetFrameSize();
int a_frame_size = aState->backend->GetFrameSize();
int a_frame_total = a_frame_num * a_frame_size;
PRFloat64 v_frame_time_length = (PRFloat64)1.0 / static_cast<PRFloat64>(v_fps);
//int a_frames_rec, v_frames_recorded;
PRUint8 *v_frame = NULL;
PRInt16 *a_frames = NULL;
@ -303,22 +298,35 @@ MediaRecorder::Encode()
PRUint8 *v_frame_future = NULL;
PRUint8 *v_frame_most_recent = NULL;
PRInt32 vlen;
PRBool is_first_video = PR_TRUE, should_end = PR_FALSE;
PRFloat64 atime, delta, current_audio_time = 0, vtime = 0;
PRFloat64 delta, vtime = 0;
if (v_rec && a_rec) {
for (;;) { // start main run loop
if (!is_recording) {
/* If we are not recording, simply clean out the pipe.
* The backends are responsible for painting the preview.
*/
if (!(a_frames = GetAudioPacket(a_frame_total))) {
should_end = PR_TRUE;
fprintf(stderr, "GetAudioPacket returned NULL\n");
goto video;
}
PR_Free(a_frames);
if (!(v_frame_tmp = GetVideoPacket(&vlen, &vtime))) {
fprintf(stderr, "GetVideoPacket returned NULL\n");
goto finish;
}
PR_Free(v_frame_tmp);
safe_rec_stp = PR_TRUE;
} else if (v_rec && a_rec) {
/* If video recording was requested, we started it first so it is
* very likely that the video frame arrived first. This means we will
* encode audio first and drop a few frames of video to align the
* start times.
*/
rv = aState->aPipeIn->Read((char *)&atime, sizeof(PRFloat64), &rd);
fprintf(stderr, "Audio stream started at %f\n", atime);
current_audio_time = atime;
multiplex:
if (!(a_frames = GetAudioPacket(a_frame_total))) {
should_end = PR_TRUE;
fprintf(stderr, "GetAudioPacket returned NULL\n");
@ -327,7 +335,7 @@ multiplex:
if (EncodeAudio(a_frames, a_frame_total) == PR_FALSE) {
goto finish;
}
current_audio_time += v_frame_time_length;
epoch += v_frame_time_length;
}
PR_Free(a_frames);
a_frames = NULL;
@ -339,13 +347,13 @@ multiplex:
*/
video:
if (is_first_video) {
delta = vtime - atime;
delta = vtime - epoch;
while (delta < 0) {
if (v_frame_most_recent) {
PR_Free(v_frame_most_recent); v_frame_most_recent = NULL;
}
v_frame_most_recent = GetVideoPacket(&vlen, &vtime);
delta = vtime - atime;
delta = vtime - epoch;
}
is_first_video = PR_FALSE;
@ -358,11 +366,11 @@ video:
if (v_frame_future) {
v_frame_tmp = v_frame_future;
v_frame_future = NULL;
delta = vtime - current_audio_time;
delta = vtime - epoch;
if (delta < 0) delta = -delta;
} else {
v_frame_tmp = GetVideoPacket(&vlen, &vtime);
delta = vtime - current_audio_time;
delta = vtime - epoch;
assert(delta >= 0);
}
@ -383,11 +391,10 @@ video:
}
v_frame_future = v_frame_tmp;
}
}
}
if (should_end)
return;
goto multiplex;
} else if (v_rec && !a_rec) {
for (;;) {
@ -413,10 +420,13 @@ video:
}
}
} // end main run loop
finish:
safe_rec_stp = PR_TRUE;
if (v_frame) PR_Free(v_frame);
if (a_frames) PR_Free(a_frames);
return;
}
@ -669,47 +679,36 @@ MediaRecorder::ParseProperties(nsIPropertyBag2 *prop)
}
/*
* Start recording to file
* Begin a Rainbow session
*/
NS_IMETHODIMP
MediaRecorder::RecordToFile(
MediaRecorder::BeginSession(
nsIPropertyBag2 *prop,
nsIDOMCanvasRenderingContext2D *ctx,
nsILocalFile *file,
nsIMediaStateObserver *obs
)
{
nsresult rv;
ParseProperties(prop);
canvas = ctx;
observer = obs;
/* Get a file stream from the local file */
nsCOMPtr<nsIFileOutputStream> stream(
do_CreateInstance("@mozilla.org/network/file-output-stream;1")
);
pipeStream = do_QueryInterface(stream, &rv);
if (NS_FAILED(rv)) return rv;
rv = stream->Init(file, -1, -1, 0);
if (NS_FAILED(rv)) return rv;
ParseProperties(prop);
thread = PR_CreateThread(
PR_SYSTEM_THREAD,
MediaRecorder::Record, this,
MediaRecorder::Begin, this,
PR_PRIORITY_NORMAL,
PR_GLOBAL_THREAD,
PR_JOINABLE_THREAD, 0
);
is_recording = PR_FALSE;
return NS_OK;
}
/*
* Start recording (called in a thread)
* Start session (called in a thread)
*/
void
MediaRecorder::Record(void *data)
MediaRecorder::Begin(void *data)
{
nsresult rv;
MediaRecorder *mr = static_cast<MediaRecorder*>(data);
@ -717,7 +716,7 @@ MediaRecorder::Record(void *data)
if (mr->a_rec || mr->v_rec) {
NS_DispatchToMainThread(new MediaCallback(
mr->observer, "error", "recording already in progress"
mr->observer, "error", "session already in progress"
));
return;
}
@ -764,7 +763,6 @@ MediaRecorder::Record(void *data)
/* Get ready for video! */
if (params->video) {
mr->SetupTheoraBOS();
rv = mr->MakePipe(
getter_AddRefs(mr->vState->vPipeIn),
getter_AddRefs(mr->vState->vPipeOut)
@ -779,7 +777,6 @@ MediaRecorder::Record(void *data)
/* Get ready for audio! */
if (params->audio) {
mr->SetupVorbisBOS();
rv = mr->MakePipe(
getter_AddRefs(mr->aState->aPipeIn),
getter_AddRefs(mr->aState->aPipeOut)
@ -794,7 +791,6 @@ MediaRecorder::Record(void *data)
/* Let's DO this. */
if (params->video) {
mr->SetupTheoraHeaders();
rv = mr->vState->backend->Start(mr->vState->vPipeOut, mr->canvas);
if (NS_FAILED(rv)) {
NS_DispatchToMainThread(new MediaCallback(
@ -806,7 +802,6 @@ MediaRecorder::Record(void *data)
mr->v_stp = PR_FALSE;
}
if (params->audio) {
mr->SetupVorbisHeaders();
rv = mr->aState->backend->Start(mr->aState->aPipeOut);
if (NS_FAILED(rv)) {
/* FIXME: Stop and clean up video! */
@ -821,30 +816,30 @@ MediaRecorder::Record(void *data)
/* Start off encoder after notifying observer */
NS_DispatchToMainThread(new MediaCallback(
mr->observer, "started", ""
mr->observer, "session-began", ""
));
mr->Encode();
return;
}
/*
* Stop recording
* End Session
*/
NS_IMETHODIMP
MediaRecorder::Stop()
MediaRecorder::EndSession()
{
if (!a_rec && !v_rec) {
NS_DispatchToMainThread(new MediaCallback(
observer, "error", "no recording in progress"
observer, "error", "no session in progress"
));
return NS_ERROR_FAILURE;
}
/* Return quickly and actually stop in a thread, notifying caller via
* the 'observer' */
* the observer */
PR_CreateThread(
PR_SYSTEM_THREAD,
MediaRecorder::StopRecord, this,
MediaRecorder::End, this,
PR_PRIORITY_NORMAL,
PR_GLOBAL_THREAD,
PR_JOINABLE_THREAD, 0
@ -854,10 +849,9 @@ MediaRecorder::Stop()
}
void
MediaRecorder::StopRecord(void *data)
MediaRecorder::End(void *data)
{
nsresult rv;
PRUint32 wr;
MediaRecorder *mr = static_cast<MediaRecorder*>(data);
if (mr->v_rec) {
@ -894,26 +888,127 @@ MediaRecorder::StopRecord(void *data)
if (mr->v_rec) {
mr->vState->vPipeIn->Close();
th_encode_free(mr->vState->th);
/* Video trailer */
if (ogg_stream_flush(&mr->vState->os, &mr->vState->og)) {
rv = mr->WriteData(
mr->vState->og.header, mr->vState->og.header_len, &wr
);
rv = mr->WriteData(
mr->vState->og.body, mr->vState->og.body_len, &wr
);
}
ogg_stream_clear(&mr->vState->os);
mr->v_rec = PR_FALSE;
}
if (mr->a_rec) {
mr->aState->aPipeIn->Close();
mr->a_rec = PR_FALSE;
}
/* Audio trailer */
/* GG */
NS_DispatchToMainThread(new MediaCallback(
mr->observer, "session-ended", ""
));
return;
}
/*
* Start recording
*/
NS_IMETHODIMP
MediaRecorder::BeginRecord(nsILocalFile *file)
{
/* Begin writing encoded media to |file| within the current session.
 * NOTE(review): there is no check here that a session is active; the JS
 * wrapper (Rainbow.beginRecord) enforces that — confirm no other callers. */
/* Get a file stream from the local file; all encoded Ogg output for this
 * recording goes through pipeStream. */
nsresult rv;
nsCOMPtr<nsIFileOutputStream> stream(
do_CreateInstance("@mozilla.org/network/file-output-stream;1")
);
pipeStream = do_QueryInterface(stream, &rv);
if (NS_FAILED(rv)) return rv;
rv = stream->Init(file, -1, -1, 0);
if (NS_FAILED(rv)) return rv;
/* Note that the BOS has to come before regular headers */
if (params->audio)
SetupVorbisBOS();
if (params->video)
SetupTheoraBOS();
if (params->audio)
SetupVorbisHeaders();
if (params->video)
SetupTheoraHeaders();
/* FIXME: This timestamp should come from backend, not here */
/* Convert PR_Now() (microseconds) into a floating-point timestamp in
 * seconds; Encode() uses |epoch| as the base time for A/V alignment. */
PRTime epoch_c = PR_Now();
epoch = (PRFloat64)(epoch_c / MICROSECONDS);
epoch += ((PRFloat64)(epoch_c % MICROSECONDS)) / MICROSECONDS;
/* Flip the flags watched by the Encode() loop: start encoding frames for
 * real, and mark that it is not yet safe to finalize the streams
 * (EndRecordThread waits on safe_rec_stp before writing trailers). */
is_recording = PR_TRUE;
safe_rec_stp = PR_FALSE;
/* Tell the observer (on the main thread) that recording has started. */
NS_DispatchToMainThread(new MediaCallback(
observer, "record-began", ""
));
return NS_OK;
}
/*
* Pause recording
*/
NS_IMETHODIMP
MediaRecorder::PauseRecord()
{
/* Clearing is_recording makes the Encode() loop drain the A/V pipes
 * without encoding their contents, so the preview keeps running while no
 * data is written to the file (see the !is_recording branch in Encode). */
is_recording = PR_FALSE;
return NS_OK;
}
/*
* Resume recording
*/
NS_IMETHODIMP
MediaRecorder::ResumeRecord()
{
/* Re-enable encoding in the Encode() loop after a PauseRecord(). Frames
 * captured while paused were freed, not buffered, so playback resumes
 * from the current capture position. */
is_recording = PR_TRUE;
return NS_OK;
}
/*
* Stop recording
*/
NS_IMETHODIMP
MediaRecorder::EndRecord()
{
/* Stop encoding immediately; the session (and preview) stays alive. */
is_recording = PR_FALSE;
/* Finalizing the Ogg streams must wait until the Encode() loop reaches a
 * safe point (signalled via safe_rec_stp), so hand that work to a thread
 * and return to the caller right away; the observer is notified with
 * "record-ended" when the trailers have been written. */
PR_CreateThread(
PR_SYSTEM_THREAD,
MediaRecorder::EndRecordThread, this,
PR_PRIORITY_NORMAL,
PR_GLOBAL_THREAD,
PR_JOINABLE_THREAD, 0
);
return NS_OK;
}
void
MediaRecorder::EndRecordThread(void *data)
{
nsresult rv;
PRUint32 wr;
MediaRecorder *mr = static_cast<MediaRecorder*>(data);
/* Wait for the Encode method to set safe_rec_stp */
while (!mr->safe_rec_stp) {
PR_Sleep(PR_INTERVAL_MIN);
}
/* Video trailer */
if (mr->v_rec) {
th_encode_free(mr->vState->th);
if (ogg_stream_flush(&mr->vState->os, &mr->vState->og)) {
rv = mr->WriteData(
mr->vState->og.header, mr->vState->og.header_len, &wr
);
rv = mr->WriteData(
mr->vState->og.body, mr->vState->og.body_len, &wr
);
}
ogg_stream_clear(&mr->vState->os);
}
/* Audio trailer */
if (mr->a_rec) {
vorbis_analysis_wrote(&mr->aState->vd, 0);
mr->WriteAudio();
@ -922,13 +1017,10 @@ MediaRecorder::StopRecord(void *data)
vorbis_comment_clear(&mr->aState->vc);
vorbis_info_clear(&mr->aState->vi);
ogg_stream_clear(&mr->aState->os);
mr->a_rec = PR_FALSE;
}
/* GG */
mr->pipeStream->Close();
NS_DispatchToMainThread(new MediaCallback(
mr->observer, "stopped", ""
mr->observer, "record-ended", ""
));
return;
}

Просмотреть файл

@ -37,7 +37,7 @@
#ifndef MediaRecorder_h_
#define MediaRecorder_h_
#include "IMediaRecorder.h"
#include "IMediaDevice.h"
#include <ogg/ogg.h>
#include <vorbis/vorbisenc.h>
@ -75,7 +75,7 @@
#include "VideoSourceCanvas.h"
#define SOCK_LEN 8192
#define MEDIA_RECORDER_CONTRACTID "@labs.mozilla.com/media/recorder;1"
#define MEDIA_RECORDER_CONTRACTID "@labs.mozilla.com/media/device;1"
#define MEDIA_RECORDER_CID { 0xc467b1f4, 0x551c, 0x4e2f, \
{ 0xa6, 0xba, 0xcb, 0x7d, 0x79, 0x2d, 0x14, 0x52 }}
@ -113,11 +113,11 @@ typedef struct {
PRUint32 fps_n, fps_d, width, height, rate, chan;
} Properties;
class MediaRecorder : public IMediaRecorder
class MediaRecorder : public IMediaDevice
{
public:
NS_DECL_ISUPPORTS
NS_DECL_IMEDIARECORDER
NS_DECL_IMEDIADEVICE
nsresult Init();
static MediaRecorder *GetSingleton();
@ -128,7 +128,10 @@ protected:
Audio *aState;
Video *vState;
PRFloat64 epoch;
PRThread *thread;
PRBool is_recording;
PRBool safe_rec_stp;
PRBool a_stp, v_stp;
PRBool a_rec, v_rec;
PRLogModuleInfo *log;
@ -141,9 +144,10 @@ protected:
static MediaRecorder *gMediaRecordingService;
static void Record(void *data);
static void StopRecord(void *data);
static void Begin(void *data);
static void End(void *data);
static void EndRecordThread(void *data);
nsresult SetupTheoraBOS();
nsresult SetupVorbisBOS();
nsresult SetupTheoraHeaders();

Просмотреть файл

@ -10,13 +10,11 @@
<canvas id="tehcanvas" width="640" height="480"></canvas>
<p><input type="button" id="tehbutton" value="Start!"/></p>
<p id="tehmsg"></p>
<input type="file" id="tehfile" style="display:none;"></input>
</body>
<script type="application/javascript;version=1.8">
let Re = false;
let img = new Image();
let msg = document.getElementById("tehmsg");
let fil = document.getElementById("tehfile");
let but = document.getElementById("tehbutton");
let ctx = document.getElementById("tehcanvas").getContext("2d");
@ -27,13 +25,13 @@
function onStateChange(type, arg) {
switch (type) {
case "started":
case "record-began":
msg.innerHTML = "Recording has begun!";
break;
case "stopped":
case "record-ended":
// Recording stopped, but file not ready yet
break;
case "finished":
case "record-finished":
// File is now available
// arg is a DOM File, see
// https://developer.mozilla.org/en/Using_files_from_web_applications
@ -50,7 +48,7 @@
but.onclick = function() {
if (Re) {
// Stop recording
Me.stop();
Me.endSession();
Re = false;
but.value = "Start!";
ctx.drawImage(img, 192, 112);
@ -59,9 +57,9 @@
Re = true;
but.value = "Stop!";
msg.innerHTML = "Loading...";
Me.recordToFile({}, ctx, onStateChange);
Me.beginSession({}, ctx, onStateChange);
Me.beginRecord();
}
}
</script>
</html>

Просмотреть файл

@ -3,12 +3,20 @@ if (window && window.navigator) {
if (!window.navigator.service)
window.navigator.service = {};
window.navigator.service.media = {
recordToFile: function(params, ctx, obs) {
return recStart(window.location, params, ctx, obs);
beginSession: function(params, ctx, obs) {
return _beginSession(window.location, params, ctx, obs);
},
stop: function() {
return recStop(window.location);
beginRecord: function() {
return _beginRecord(window.location);
},
pauseRecord: function() {
return _pauseRecord(window.location);
},
endRecord: function() {
return _endRecord(window.location);
},
endSession: function() {
return _endSession(window.location);
}
}
}

Просмотреть файл

@ -84,16 +84,40 @@ var RainbowObserver = {
sandbox.importFunction(function(loc, prop, ctx, obs) {
rainbow._verifyPermission(window, loc, function(allowed) {
if (allowed) rainbow.recordToFile_verified(prop, ctx, obs);
if (allowed) rainbow.beginSession(prop, ctx, obs);
else throw "Permission denied";
});
}, "recStart");
}, "_beginSession");
sandbox.importFunction(function(loc) {
rainbow._verifyPermission(window, loc, function(allowed) {
if (allowed) rainbow.stop_verified();
if (allowed) rainbow.beginRecord();
else throw "Permission denied";
});
}, "recStop");
}, "_beginRecord");
sandbox.importFunction(function(loc) {
rainbow._verifyPermission(window, loc, function(allowed) {
if (allowed) rainbow.pauseRecord();
else throw "Permission denied";
});
}, "_pauseRecord");
sandbox.importFunction(function(loc) {
rainbow._verifyPermission(window, loc, function(allowed) {
if (allowed) rainbow.resumeRecord();
else throw "Permission denied";
});
}, "_resumeRecord");
sandbox.importFunction(function(loc) {
rainbow._verifyPermission(window, loc, function(allowed) {
if (allowed) rainbow.endRecord();
else throw "Permission denied";
});
}, "_endRecord");
sandbox.importFunction(function(loc) {
rainbow._verifyPermission(window, loc, function(allowed) {
if (allowed) rainbow.endSession();
else throw "Permission denied";
});
}, "_endSession");
let toInject = getInjected();
Components.utils.evalInSandbox(

Просмотреть файл

@ -48,6 +48,7 @@ Cu.import("resource://gre/modules/XPCOMUtils.jsm");
function Rainbow() {
this._input = null;
this._session = false;
this._recording = false;
}
Rainbow.prototype = {
@ -66,11 +67,11 @@ Rainbow.prototype = {
getService(Ci.nsIPermissionManager);
},
get _recorder() {
delete this._recorder;
return this._recorder =
Cc["@labs.mozilla.com/media/recorder;1"].
getService(Ci.IMediaRecorder)
get _rainbow() {
delete this._rainbow;
return this._rainbow =
Cc["@labs.mozilla.com/media/device;1"].
getService(Ci.IMediaDevice)
},
_makeURI: function(url) {
@ -167,13 +168,27 @@ Rainbow.prototype = {
}
},
recordToFile_verified: function(prop, ctx, obs) {
if (this._recording)
throw "Recording already in progress";
beginSession: function(prop, ctx, obs) {
if (this._session)
throw "Session already in progress";
// Make property bag
let bag = this._makePropertyBag(prop);
this._context = ctx;
// Make sure observer is setup correctly, if none provided, ignore
if (obs) this._observer = obs;
else this._observer = function() {};
// Start session
this._rainbow.beginSession(bag, ctx, this._observer);
this._session = true;
},
beginRecord: function() {
if (!this._session)
throw "No session in progress";
// Create a file to dump to
let file = Cc["@mozilla.org/file/directory_service;1"].
getService(Ci.nsIProperties).get("TmpD", Ci.nsILocalFile);
@ -181,32 +196,45 @@ Rainbow.prototype = {
file.createUnique(Ci.nsIFile.NORMAL_FILE_TYPE, 0666);
// Create dummy HTML <input> element to create DOMFile
let doc = ctx.canvas.ownerDocument;
let doc = this._context.canvas.ownerDocument;
this._input = doc.createElement('input');
this._input.type = 'file';
this._input.mozSetFileNameArray([file.path], 1);
this._rainbow.beginRecord(file);
// Make sure observer is setup correctly, if none provided, ignore
if (obs) this._observer = obs;
else this._observer = function() {};
// Start recording
this._recorder.recordToFile(bag, ctx, file, this._observer);
this._recording = true;
},
stop_verified: function() {
pauseRecord: function() {
if (!this._recording)
throw "No recording in progress";
this._recorder.stop();
this._rainbow.pauseRecord();
},
// Resume a recording previously paused with pauseRecord(); only valid
// while a recording is in progress.
resumeRecord: function() {
if (!this._recording)
throw "No recording in progress";
this._rainbow.resumeRecord();
},
endRecord: function() {
if (!this._recording)
throw "No recording in progress";
this._rainbow.endRecord();
this._recording = false;
if (this._input) {
let ret = this._input;
this._input = null;
this._observer("finished", ret);
this._observer("record-finished", ret);
}
},
// Tear down the capture session. If a recording is still active it is
// ended first (so the file is finalized) before the native component
// shuts the session down.
endSession: function() {
if (!this._session)
throw "No session in progress";
if (this._recording)
this.endRecord();
this._rainbow.endSession();
this._session = false;
}
};