Bug 869224 - Use the same algorithm to compute the buffer start/end time and to convert AudioParam time values to ticks; r=roc

This commit is contained in:
Ehsan Akhgari 2013-05-07 23:31:15 -04:00
Parent 5db083ef7f
Commit f2d6931593
5 changed files: 84 additions and 13 deletions
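In short, AudioBufferSourceNode start/stop times and AudioParam timeline values are now converted through one shared helper instead of two slightly different code paths that rounded differently. Below is a minimal annotated sketch of that helper, restating the code this patch adds (the types and helpers such as StreamTime, GraphTime, TrackTicks, TimeToTicksRoundUp, and IdealAudioRate come from Gecko's MediaStreamGraph code):

// Each step moves the time value between clock domains before the
// final rounding to ticks:
//   seconds -> StreamTime (destination) -> GraphTime
//           -> StreamTime (source)      -> TrackTicks
TrackTicks
ConvertDestinationStreamTimeToSourceStreamTime(double aTime,
                                               MediaStream* aSource,
                                               MediaStream* aDestination)
{
  // Clamp negative times to zero and convert seconds to MediaTime units.
  StreamTime streamTime = std::max<MediaTime>(0, SecondsToMediaTime(aTime));
  // Map the destination stream's local time onto the graph-wide timeline.
  GraphTime graphTime = aDestination->StreamTimeToGraphTime(streamTime);
  // Map graph time back into the source stream's local time.
  StreamTime thisStreamTime = aSource->GraphTimeToStreamTimeOptimistic(graphTime);
  // Round up to ticks, matching how buffer start/end times are computed.
  return TimeToTicksRoundUp(IdealAudioRate(), thisStreamTime);
}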

@@ -53,10 +53,9 @@ void
 AudioNodeStream::SetStreamTimeParameterImpl(uint32_t aIndex, MediaStream* aRelativeToStream,
                                             double aStreamTime)
 {
-  StreamTime streamTime = std::max<MediaTime>(0, SecondsToMediaTime(aStreamTime));
-  GraphTime graphTime = aRelativeToStream->StreamTimeToGraphTime(streamTime);
-  StreamTime thisStreamTime = GraphTimeToStreamTimeOptimistic(graphTime);
-  TrackTicks ticks = TimeToTicksRoundUp(IdealAudioRate(), thisStreamTime);
+  TrackTicks ticks =
+    WebAudioUtils::ConvertDestinationStreamTimeToSourceStreamTime(
+        aStreamTime, this, aRelativeToStream);
   mEngine->SetStreamTimeParameter(aIndex, ticks);
 }

@@ -18,22 +18,29 @@ struct ConvertTimeToTickHelper
   static int64_t Convert(double aTime, void* aClosure)
   {
-    TrackRate sampleRate = IdealAudioRate();
-    StreamTime streamTime;
     ConvertTimeToTickHelper* This = static_cast<ConvertTimeToTickHelper*> (aClosure);
     if (This->mSourceStream) {
-      TrackTicks tick = This->mDestinationStream->GetCurrentPosition();
-      StreamTime destinationStreamTime = TicksToTimeRoundDown(sampleRate, tick);
-      GraphTime graphTime = This->mDestinationStream->StreamTimeToGraphTime(destinationStreamTime);
-      streamTime = This->mSourceStream->GraphTimeToStreamTime(graphTime);
+      return WebAudioUtils::ConvertDestinationStreamTimeToSourceStreamTime(
+          aTime, This->mSourceStream, This->mDestinationStream);
     } else {
-      streamTime = This->mDestinationStream->GetCurrentPosition();
+      StreamTime streamTime = This->mDestinationStream->GetCurrentPosition();
+      return TimeToTicksRoundUp(IdealAudioRate(), streamTime + SecondsToMediaTime(aTime));
     }
-    return TimeToTicksRoundDown(sampleRate, streamTime + SecondsToMediaTime(aTime));
   }
 };

+TrackTicks
+WebAudioUtils::ConvertDestinationStreamTimeToSourceStreamTime(double aTime,
+                                                              MediaStream* aSource,
+                                                              MediaStream* aDestination)
+{
+  StreamTime streamTime = std::max<MediaTime>(0, SecondsToMediaTime(aTime));
+  GraphTime graphTime = aDestination->StreamTimeToGraphTime(streamTime);
+  StreamTime thisStreamTime = aSource->GraphTimeToStreamTimeOptimistic(graphTime);
+  TrackTicks ticks = TimeToTicksRoundUp(IdealAudioRate(), thisStreamTime);
+  return ticks;
+}
+
 double
 WebAudioUtils::StreamPositionToDestinationTime(TrackTicks aSourcePosition,
                                                AudioNodeStream* aSource,

@@ -41,6 +41,15 @@ struct WebAudioUtils {
     return 1.0 - std::exp(-1.0 / (aDuration * aSampleRate));
   }

+  /**
+   * Convert a time in seconds relative to the destination stream to
+   * TrackTicks relative to the source stream.
+   */
+  static TrackTicks
+  ConvertDestinationStreamTimeToSourceStreamTime(double aTime,
+                                                 MediaStream* aSource,
+                                                 MediaStream* aDestination);
+
   /**
    * Converts AudioParamTimeline floating point time values to tick values
    * with respect to a source and a destination AudioNodeStream.

@@ -31,6 +31,7 @@ MOCHITEST_FILES := \
   test_audioParamLinearRamp.html \
   test_audioParamSetCurveAtTime.html \
   test_audioParamSetTargetAtTime.html \
+  test_audioParamTimelineDestinationOffset.html \
   test_audioBufferSourceNode.html \
   test_audioBufferSourceNodeLazyLoopParam.html \
   test_audioBufferSourceNodeLoop.html \

@@ -0,0 +1,55 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+  <title>Test AudioParam timeline events scheduled after the destination stream has started playback</title>
+  <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
+  <script type="text/javascript" src="webaudio.js"></script>
+  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+addLoadEvent(function() {
+  SpecialPowers.setBoolPref("media.webaudio.enabled", true);
+
+  var context = new AudioContext();
+  var sourceBuffer = context.createBuffer(1, 2048, context.sampleRate);
+  for (var i = 0; i < 2048; ++i) {
+    sourceBuffer.getChannelData(0)[i] = 1;
+  }
+  var emptyBuffer = context.createBuffer(1, 16384, context.sampleRate);
+
+  setTimeout(function() {
+    var source = context.createBufferSource();
+    source.buffer = sourceBuffer;
+    source.start(context.currentTime);
+    source.stop(context.currentTime + sourceBuffer.duration);
+
+    var gain = context.createGain();
+    gain.gain.setValueAtTime(0, context.currentTime);
+    gain.gain.setTargetAtTime(0, context.currentTime + sourceBuffer.duration, 1);
+    source.connect(gain);
+
+    var sp = context.createScriptProcessor(16384, 1);
+    gain.connect(sp);
+    sp.connect(context.destination);
+    sp.onaudioprocess = function(e) {
+      is(e.inputBuffer.numberOfChannels, 1, "Correct input channel count");
+      compareBuffers(e.inputBuffer.getChannelData(0), emptyBuffer.getChannelData(0));
+      sp.onaudioprocess = null;
+      SpecialPowers.clearUserPref("media.webaudio.enabled");
+      SimpleTest.finish();
+    };
+  }, 100);
+});
+
+</script>
+</pre>
+</body>
+</html>