Mirror of https://github.com/mozilla/shumway.git
Plays PCM data via Audio Data API or Web Audio API
This commit is contained in:
Parent
ff1eada110
Commit
aa59714613
@@ -14,6 +14,7 @@
    <script src="../../lib/DataView.js/DataView.js"></script>
    <script src="../../lib/Kanvas/kanvas.js"></script>
    <script src="../../lib/xstats/xstats.js"></script>
    <script src="../../lib/mp3/mp3.js"></script>

    <!-- Load SWF Dependencies -->
    <script src="../../src/swf/util.js"></script>
@@ -0,0 +1 @@
// placeholder for MP3Decoder class
@@ -3,6 +3,7 @@ var LoaderDefinition = (function () {
var LOADER_PATH = 'flash/display/Loader.js';
var WORKER_SCRIPTS = [
  '../../../lib/DataView.js/DataView.js',
  '../../../lib/mp3/mp3.js',

  '../util.js',
@@ -1,9 +1,17 @@
var PLAY_USING_AUDIO_TAG = true;

var SoundDefinition = (function () {

  var audioElement = null;

  function getAudioDescription(soundData, onComplete) {
    audioElement = audioElement || document.createElement('audio');
    if (!audioElement.canPlayType(soundData.mimeType)) {
      onComplete({
        duration: 0
      });
      return;
    }
    audioElement.src = "data:" + soundData.mimeType + ";base64," + base64ArrayBuffer(soundData.data);
    audioElement.load();
    audioElement.addEventListener("loadedmetadata", function () {
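getAudioDescription (and _playSoundDataViaAudio further down) builds a data: URI with a base64ArrayBuffer helper that is defined elsewhere in the tree, not in this diff. A minimal sketch of what such a helper could look like, assuming a browser environment with btoa (illustrative only):

// Illustrative sketch, not the helper shipped with this change.
function base64ArrayBuffer(arrayBuffer) {
  var bytes = new Uint8Array(arrayBuffer);
  var binary = '';
  var CHUNK = 0x8000; // build the string in chunks to avoid call-stack limits
  for (var i = 0; i < bytes.length; i += CHUNK) {
    binary += String.fromCharCode.apply(null, bytes.subarray(i, i + CHUNK));
  }
  return btoa(binary);
}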
@@ -23,8 +31,19 @@ var SoundDefinition = (function () {
      this._id3 = new flash.media.ID3Info();

      var s = this.symbol;
      if (s && s.packaged) {
        var soundData = s.packaged;
      if (s) {
        var soundData = {};
        if (s.pcm) {
          soundData.sampleRate = s.sampleRate;
          soundData.channels = s.channels;
          soundData.pcm = s.pcm;
          soundData.end = s.pcm.length;
        }
        soundData.completed = true;
        if (s.packaged) {
          soundData.data = s.packaged.data.buffer;
          soundData.mimeType = s.packaged.mimeType;
        }
        var _this = this;
        getAudioDescription(soundData, function (description) {
          _this._length = description.duration;
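For orientation, the soundData object assembled here (and by the streaming path in the next hunk) carries the fields summarized below; example values are illustrative, not additional code:

// soundData fields used throughout this change (example values are illustrative):
// sampleRate: 44100         - PCM sample rate from the SWF symbol or the MP3 decoder
// channels:   2             - channel count
// pcm:        Float32Array  - interleaved PCM samples
// end:        0             - number of valid samples written into pcm so far
// completed:  false         - true once all data has been received/decoded
// data:       ArrayBuffer   - packaged bytes (e.g. WAVE/MP3) for the <audio> path
// mimeType:   'audio/mpeg'  - MIME type used to build the data: URI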
@@ -48,27 +67,68 @@ var SoundDefinition = (function () {
      }

      var _this = this;
      var loader = this._loader = new flash.net.URLLoader(request);
      loader.dataFormat = "binary";
      var stream = this._stream = new flash.net.URLStream();
      var ByteArrayClass = avm2.systemDomain.getClass("flash.utils.ByteArray");
      var data = ByteArrayClass.createInstance();
      var dataPosition = 0;
      var mp3DecodingSession = null;
      var soundData = { completed: false };

      loader.addEventListener("progress", function (event) {
      stream.addEventListener("progress", function (event) {
        console.info("PROGRESS");
        _this._bytesLoaded = event.public$bytesLoaded;
        _this._bytesTotal = event.public$bytesTotal;

        if (!PLAY_USING_AUDIO_TAG && !mp3DecodingSession) {
          // initialize MP3 decoding
          mp3DecodingSession = decodeMP3(soundData, function ondurationchanged(duration) {
            if (_this._length === 0) {
              // once we have some data, try to play it
              _this._soundData = soundData;

              _this._playQueue.forEach(function (item) {
                item.channel._playSoundDataViaChannel(soundData, item.startTime);
              });
            }
            // TODO estimate duration based on bytesTotal
            _this._length = duration * 1000;
          });
        }

        var bytesAvailable = stream.bytesAvailable;
        stream.readBytes(data, dataPosition, bytesAvailable);
        if (mp3DecodingSession) {
          mp3DecodingSession.pushData(data, dataPosition, bytesAvailable);
        }
        dataPosition += bytesAvailable;

        _this.dispatchEvent(event);
      });

      loader.addEventListener("complete", function (event) {
      stream.addEventListener("complete", function (event) {
        _this.dispatchEvent(event);
        var soundData = _this._soundData = {
          data: loader.data.a,
          mimeType: 'audio/mpeg'
        };
        soundData.data = data.a;
        soundData.mimeType = 'audio/mpeg';
        soundData.completed = true;

        if (PLAY_USING_AUDIO_TAG) {
          _this._soundData = soundData;

          getAudioDescription(soundData, function (description) {
            _this._length = description.duration;
          });

          _this._playQueue.forEach(function (item) {
            playChannel(soundData, item.channel, item.startTime, item.soundTransform);
            item.channel._playSoundDataViaAudio(soundData, item.startTime);
          });
        }

        if (mp3DecodingSession) {
          mp3DecodingSession.close();
        }
      });

      stream.load(request);
    },
    loadCompressedDataFromByteArray: function loadCompressedDataFromByteArray(bytes, bytesLength) {
      throw 'Not implemented: loadCompressedDataFromByteArray';
@@ -82,22 +142,25 @@ var SoundDefinition = (function () {
      startTime = startTime || 0;
      var channel = new flash.media.SoundChannel();
      channel._sound = this;
      channel._soundTransform = soundTransform;
      this._playQueue.push({
        channel: channel,
        startTime: startTime,
        soundTransform: soundTransform
        startTime: startTime
      });
      if (this._soundData) {
        playChannel(this._soundData, channel, startTime, soundTransform);
        if (PLAY_USING_AUDIO_TAG)
          channel._playSoundDataViaAudio(this._soundData, startTime);
        else
          channel._playSoundDataViaChannel(this._soundData, startTime);
      }
      return channel;
    },

    get bytesLoaded() {
      return this._loader.bytesLoaded;
      return this._bytesLoaded;
    },
    get bytesTotal() {
      return this._loader.bytesTotal;
      return this._bytesTotal;
    },
    get id3() {
      return this._id3;
@@ -116,13 +179,59 @@ var SoundDefinition = (function () {
    }
  };

  function playChannel(soundData, channel, startTime, soundTransform) {
    var element = channel._element;
    element.src = "data:" + soundData.mimeType + ";base64," + base64ArrayBuffer(soundData.data);
    element.addEventListener("loadeddata", function loaded() {
      element.currentTime = startTime / 1000;
      element.play();
    });
  // TODO send to MP3 decoding worker
  function decodeMP3(soundData, ondurationchanged) {
    var currentSize = 8000;
    var pcm = new Float32Array(currentSize);
    var position = 0;
    var mp3Decoder = new MP3Decoder();
    mp3Decoder.onframedata = function (frame, channels, sampleRate) {
      if (frame.length === 0)
        return;
      if (!position) {
        // first data: initialize pcm data fields
        soundData.sampleRate = sampleRate,
        soundData.channels = channels;
        soundData.pcm = pcm;
      }
      if (position + frame.length >= currentSize) {
        do {
          currentSize *= 2;
        } while (position + frame.length >= currentSize);
        var newPcm = new Float32Array(currentSize);
        newPcm.set(pcm);
        pcm = soundData.pcm = newPcm;
      }
      pcm.set(frame, position);
      soundData.end = position += frame.length;

      var duration = position / soundData.sampleRate / soundData.channels;
      ondurationchanged(duration);
    };
    return {
      chunks: [],
      pushData: function (data, offset, length) {
        function decodeNext() {
          var chunk = chunks.shift();
          mp3Decoder.push(chunk);
          if (chunks.length > 0)
            setTimeout(decodeNext);
        }
        var chunks = this.chunks;
        var initPush = chunks.length === 0;
        var maxChunkLength = 8000;
        for (var i = 0; i < length; i += maxChunkLength) {
          var chunkOffset = offset + i;
          var chunkLength = Math.min(length - chunkOffset, maxChunkLength);
          var chunk = new Uint8Array(data.a, chunkOffset, chunkLength);
          chunks.push(chunk);
        }
        if (initPush)
          decodeNext();
      },
      close: function () {
      }
    };
  }

  var desc = Object.getOwnPropertyDescriptor;
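decodeMP3 above grows its PCM buffer by doubling whenever a decoded frame would overflow it. The same append-with-doubling pattern in isolation, as a small self-contained sketch (names are illustrative, not part of the commit):

// Sketch of the doubling-append pattern used by decodeMP3 (illustrative names).
function makePcmAppender(initialSize) {
  var buffer = new Float32Array(initialSize);
  var position = 0;
  return {
    append: function (frame) {
      if (position + frame.length >= buffer.length) {
        var newSize = buffer.length;
        do {
          newSize *= 2;                 // grow geometrically, like decodeMP3
        } while (position + frame.length >= newSize);
        var newBuffer = new Float32Array(newSize);
        newBuffer.set(buffer);          // copy the samples written so far
        buffer = newBuffer;
      }
      buffer.set(frame, position);
      position += frame.length;
      return position;                  // number of valid samples so far
    }
  };
}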
@@ -2,7 +2,56 @@ const SoundChannelDefinition = (function () {
  return {
    // ()
    initialize: function () {
      this._element = document.createElement('audio');
      this._element = null;
      this._position = 0;
      this._pcmData = null;
      this._soundTransform = null;
    },
    _playSoundDataViaChannel: function (soundData, startTime) {
      assert(soundData.pcm, 'no pcm data found');

      var self = this;
      var position = Math.round(startTime / 1000 * soundData.sampleRate) *
                     soundData.channels;
      this._position = startTime;
      this._audioChannel = createAudioChannel(soundData.sampleRate, soundData.channels);
      this._audioChannel.ondatarequested = function (e) {
        var end = soundData.end;
        if (position >= end && soundData.completed) {
          // end of buffer
          self._audioChannel.stop();
          return;
        }
        // TODO loop

        var count = Math.min(end - position, e.count);
        if (count === 0) return;

        var data = e.data;
        var source = soundData.pcm;
        for (var j = 0; j < count; j++) {
          data[j] = source[position++];
        }

        self._position = position / soundData.sampleRate / soundData.channels * 1000;
      };
      this._audioChannel.start();
    },
    _playSoundDataViaAudio: function (soundData, startTime) {
      this._position = startTime;
      var self = this;
      var element = document.createElement('audio');
      if (!element.canPlayType(soundData.mimeType))
        error('\"' + soundData.mimeType + '\" type playback is not supported by the browser');
      element.src = "data:" + soundData.mimeType + ";base64," + base64ArrayBuffer(soundData.data);
      element.addEventListener("loadeddata", function loaded() {
        element.currentTime = startTime / 1000;
        element.play();
      });
      element.addEventListener("timeupdate", function timeupdate() {
        self._position = element.currentTime * 1000;
      });
      this._element = element;
    },
    __glue__: {
      native: {
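_playSoundDataViaChannel above treats soundData.pcm as interleaved samples, so a start time in milliseconds maps to a frame index multiplied by the channel count. A worked example with assumed values:

// Worked example of the start-offset math (values are only illustrative).
var sampleRate = 44100;  // Hz
var channels = 2;        // interleaved stereo: [L, R, L, R, ...]
var startTime = 500;     // milliseconds
var position = Math.round(startTime / 1000 * sampleRate) * channels;
// 0.5 s * 44100 Hz = 22050 frames; 22050 frames * 2 channels = 44100 samples skipped.
console.log(position);   // 44100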
@@ -11,12 +60,25 @@ const SoundChannelDefinition = (function () {
      instance: {
        // (void) -> void
        stop: function stop() {
          if (this._element) {
            this._element.pause();
          }
          if (this._audioChannel) {
            this._audioChannel.stop();
          }
        },
        "position": {
          // (void) -> Number
          get: function position() {
            return this._element.currentTime * 1000;
            return this._position;
          }
        },
        "soundTransform": {
          get: function soundTransform() {
            return this._soundTransform;
          },
          set: function soundTransform(val) {
            this._soundTransform = val;
          }
        }
      }
@@ -24,3 +86,181 @@ const SoundChannelDefinition = (function () {
    }
  };
}).call(this);

function createAudioChannel(sampleRate, channels) {
  if (WebAudioChannel.isSupported)
    return new WebAudioChannel(sampleRate, channels);
  else if (AudioDataChannel.isSupported)
    return new AudioDataChannel(sampleRate, channels);
  else
    error('PCM data playback is not supported by the browser');
}

// Resample sound using linear interpolation for Web Audio due to
// http://code.google.com/p/chromium/issues/detail?id=73062
function AudioResampler(sourceRate, targetRate) {
  this.sourceRate = sourceRate;
  this.targetRate = targetRate;
  this.tail = [];
  this.sourceOffset = 0;
}
AudioResampler.prototype = {
  ondatarequested: function (e) { },
  getData: function (channelsData, count) {
    var k = this.sourceRate / this.targetRate;

    var offset = this.sourceOffset;
    var needed = Math.ceil((count - 1) * k + offset) + 1;
    var sourceData = [];
    for (var channel = 0; channel < channelsData.length; channel++)
      sourceData.push(new Float32Array(needed));
    var e = { data: sourceData, count: needed };
    this.ondatarequested(e);
    for (var channel = 0; channel < channelsData.length; channel++) {
      var data = channelsData[channel];
      var source = sourceData[channel];
      for (var j = 0; j < count; j++) {
        var i = j * k + offset;
        var i1 = Math.floor(i), i2 = Math.ceil(i);
        var source_i1 = i1 < 0 ? this.tail[channel] : source[i1];
        if (i1 === i2) {
          data[j] = source_i1;
        } else {
          var alpha = i - i1;
          data[j] = source_i1 * (1 - alpha) + source[i2] * alpha;
        }
      }
      this.tail[channel] = source[needed - 1];
    }
    this.sourceOffset = ((count - 1) * k + offset) - (needed - 1);
  }
};

function WebAudioChannel(sampleRate, channels) {
  var context = WebAudioChannel.context;
  if (!context) {
    if (typeof AudioContext !== 'undefined')
      context = new AudioContext();
    else
      context = new webkitAudioContext();
    WebAudioChannel.context = context;
  }
  this.context = context;
  this.contextSampleRate = context.sampleRate || 44100;

  this.channels = channels;
  this.sampleRate = sampleRate;
  if (this.contextSampleRate != sampleRate) {
    this.resampler = new AudioResampler(sampleRate, this.contextSampleRate);
    this.resampler.ondatarequested = function (e) {
      this.requestData(e.data, e.count);
    }.bind(this);
  }
}
WebAudioChannel.prototype = {
  start: function () {
    var source = this.context.createScriptProcessor ?
                 this.context.createScriptProcessor(2048, 0, this.channels) :
                 this.context.createJavaScriptNode(2048, 0, this.channels);
    var self = this;
    source.onaudioprocess = function(e) {
      var channelsData = [];
      for (var i = 0; i < self.channels; i++)
        channelsData.push(e.outputBuffer.getChannelData(i));
      var count = channelsData[0].length;
      if (self.resampler) {
        self.resampler.getData(channelsData, count);
      } else {
        var e = { data: channelsData, count: count };
        self.requestData(channelsData, count);
      }
    };

    source.connect(this.context.destination);
    this.source = source;
  },
  stop: function () {
    this.source.disconnect(this.context.destination);
  },
  requestData: function (channelsData, count) {
    var channels = this.channels;
    var buffer = new Float32Array(count * channels);
    var e = { data: buffer, count: buffer.length };
    this.ondatarequested(e);

    for (var j = 0, p = 0; j < count; j++) {
      for (var i = 0; i < channels; i++)
        channelsData[i][j] = buffer[p++];
    }
  }
};
WebAudioChannel.isSupported = (function() {
  return typeof AudioContext !== 'undefined' ||
         typeof webkitAudioContext != 'undefined';
})();

// from https://wiki.mozilla.org/Audio_Data_API
function AudioDataChannel(sampleRate, channels) {
  this.sampleRate = sampleRate;
  this.channels = channels;
}
AudioDataChannel.prototype = {
  start: function () {
    var sampleRate = this.sampleRate;
    var channels = this.channels;
    var self = this;

    // Initialize the audio output.
    var audio = new Audio();
    audio.mozSetup(channels, sampleRate);

    var currentWritePosition = 0;
    var prebufferSize = sampleRate * channels / 2; // buffer 500ms
    var tail = null, tailPosition;

    // The function called with regular interval to populate
    // the audio output buffer.
    this.interval = setInterval(function() {
      var written;
      // Check if some data was not written in previous attempts.
      if(tail) {
        written = audio.mozWriteAudio(tail.subarray(tailPosition));
        currentWritePosition += written;
        tailPosition += written;
        if(tailPosition < tail.length) {
          // Not all the data was written, saving the tail...
          return; // ... and exit the function.
        }
        tail = null;
      }

      // Check if we need to add some data to the audio output.
      var currentPosition = audio.mozCurrentSampleOffset();
      var available = currentPosition + prebufferSize - currentWritePosition;
      available -= available % channels; // align to channels count
      if(available > 0) {
        // Request some sound data from the callback function.
        var soundData = new Float32Array(available);
        self.requestData(soundData, available);

        // Writing the data.
        written = audio.mozWriteAudio(soundData);
        if(written < soundData.length) {
          // Not all the data was written, saving the tail.
          tail = soundData;
          tailPosition = written;
        }
        currentWritePosition += written;
      }
    }, 100);
  },
  stop: function () {
    clearInterval(this.interval);
  },
  requestData: function (data, count) {
    this.ondatarequested({data: data, count: count});
  }
};
AudioDataChannel.isSupported = (function () {
  return 'mozSetup' in (new Audio);
})();
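Both channel backends share a pull model: start() begins asking for samples, and the assigned ondatarequested callback must fill e.data with up to e.count interleaved samples. An illustrative use of that contract outside of SoundChannel, generating a quiet 440 Hz tone (sample rate and values are arbitrary):

// Illustrative only: exercising the pull-style contract shared by
// WebAudioChannel and AudioDataChannel.
var toneChannel = createAudioChannel(44100, 1);
var sampleIndex = 0;
toneChannel.ondatarequested = function (e) {
  for (var i = 0; i < e.count; i++) {
    e.data[i] = 0.25 * Math.sin(2 * Math.PI * 440 * sampleIndex++ / 44100);
  }
};
toneChannel.start();
// ... later, when playback should end:
// toneChannel.stop();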
@@ -63,7 +63,7 @@ function defineSound(tag, dictionary) {
    case SOUND_FORMAT_PCM_BE:
      if (tag.soundSize == SOUND_SIZE_16_BIT) {
        for (var i = 0, j = 0; i < pcm.length; i++, j += 2)
          pcm[i] = ((data[i] << 24) | (data[i + 1] << 16)) / 2147483648;
          pcm[i] = ((data[j] << 24) | (data[j + 1] << 16)) / 2147483648;
        packaged = packageWave(data, sampleRate, channels, 16, true);
      } else {
        for (var i = 0; i < pcm.length; i++)
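The corrected loop reads two bytes per sample through the second index j; shifting into the top of a 32-bit integer and dividing by 2^31 maps a signed 16-bit big-endian value into [-1, 1). A worked example:

// Worked example of the 16-bit big-endian sample conversion used above.
function sample16be(hi, lo) {
  // hi/lo are unsigned bytes; << 24 places the sign bit, / 2^31 normalizes.
  return ((hi << 24) | (lo << 16)) / 2147483648;
}
console.log(sample16be(0x40, 0x00)); //  0.5
console.log(sample16be(0xC0, 0x00)); // -0.5
console.log(sample16be(0x7F, 0xFF)); //  0.999969482421875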
@@ -74,7 +74,7 @@ function defineSound(tag, dictionary) {
    case SOUND_FORMAT_PCM_LE:
      if (tag.soundSize == SOUND_SIZE_16_BIT) {
        for (var i = 0, j = 0; i < pcm.length; i++, j += 2)
          pcm[i] = ((data[i + 1] << 24) | (data[i] << 16)) / 2147483648;
          pcm[i] = ((data[j + 1] << 24) | (data[j] << 16)) / 2147483648;
        packaged = packageWave(data, sampleRate, channels, 16, false);
      } else {
        for (var i = 0; i < pcm.length; i++)
@@ -153,7 +153,7 @@ function SwfSoundStream_decode_PCM(data) {
function SwfSoundStream_decode_PCM_be(data) {
  var pcm = new Float32Array(data.length / 2);
  for (var i = 0, j = 0; i < pcm.length; i++, j += 2)
    pcm[i] = ((data[i] << 24) | (data[i + 1] << 16)) / 2147483648;
    pcm[i] = ((data[j] << 24) | (data[j + 1] << 16)) / 2147483648;
  this.currentSample += pcm.length / this.channels;
  return {
    pcm: pcm,
@@ -164,7 +164,7 @@ function SwfSoundStream_decode_PCM_be(data) {
function SwfSoundStream_decode_PCM_le(data) {
  var pcm = new Float32Array(data.length / 2);
  for (var i = 0, j = 0; i < pcm.length; i++, j += 2)
    pcm[i] = ((data[i + 1] << 24) | (data[i] << 16)) / 2147483648;
    pcm[i] = ((data[j + 1] << 24) | (data[j] << 16)) / 2147483648;
  this.currentSample += pcm.length / this.channels;
  return {
    pcm: pcm,