Added XRVideoFrame
Encapsulated all of the video-frame functionality (receiving frames, managing cached ArrayBuffers, lazily converting buffers from base64 to binary, etc.) in an object. Included methods for round-tripping frames through workers (postMessageToWorker and postReplyMessage). Updated the simplecv example.
This commit is contained in:
Parent: 83d67ca365
Commit: a8c735c040
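Roughly, the new flow looks like this (a minimal sketch based on the simplecv example in this commit, not a definitive API reference; "worker.js" and the intensity result field are placeholders, and `session` is assumed to be an active XRSession — in the example these calls go through a small wrapper class):

    // Main page: hand each video frame to a Worker, then ask for the next one.
    var worker = new Worker("worker.js")
    worker.onmessage = function (ev) {
        // Rebuild an XRVideoFrame from the worker's reply message.
        var videoFrame = XRVideoFrame.createFromMessage(ev)
        console.log("average intensity", ev.data.intensity)
        videoFrame.release()           // return the ArrayBuffers to the reuse cache
        session.requestVideoFrame()    // request the next frame
    }
    session.setVideoWorker(worker)     // frames are posted to the worker automatically
    session.requestVideoFrame()

    // worker.js (loads the worker-side polyfill via importScripts('webxr-worker.js')):
    self.addEventListener('message', function (event) {
        var videoFrame = XRVideoFrame.createFromMessage(event)
        var luma = videoFrame.buffer(0)      // lazily base64-decodes into an ArrayBuffer
        // ... run computer vision over luma.buffer ...
        videoFrame.postReplyMessage({ intensity: 0.5 })   // placeholder result
        videoFrame.release()
    })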
@@ -180,8 +180,8 @@ class XRExampleBase {

	// request the next frame
	// buffers is an optional parameter, suggesting buffers that could be used
	requestVideoFrame(buffers) {
		this.session.requestVideoFrame(buffers);
	requestVideoFrame() {
		this.session.requestVideoFrame();
	}

	/*
@@ -1,6 +1,6 @@
<html>
	<head>
		<title>AR anchor example</title>
		<title>Simple Custom CV example</title>
		<meta charset="utf-8">
		<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
		<style>
@ -60,122 +60,7 @@
|
|||
<p>Compute the average intensity of the video image pixels.</p>
|
||||
</div>
|
||||
<script id="worker1" type="javascript/worker">
|
||||
|
||||
/**
|
||||
* In the video callback, ev.detail contains:
|
||||
{
|
||||
"frame": {
|
||||
"buffers": [ // Array of base64 encoded string buffers
|
||||
{
|
||||
"size": {
|
||||
"width": 320,
|
||||
"height": 180,
|
||||
"bytesPerRow": 320,
|
||||
"bytesPerPixel": 1
|
||||
},
|
||||
"buffer": "e3x...d7d" /// convert to Uint8 ArrayBuffer in code below
|
||||
},
|
||||
{
|
||||
"size": {
|
||||
"width": 160,
|
||||
"height": 90,
|
||||
"bytesPerRow": 320,
|
||||
"bytesPerPixel": 2
|
||||
},
|
||||
"buffer": "ZZF.../fIJ7" /// convert to Uint8 ArrayBuffer in code below
|
||||
}
|
||||
],
|
||||
"pixelFormatType": "kCVPixelFormatType_420YpCbCr8BiPlanarFullRange",
|
||||
"pixelFormat": "YUV420P", /// Added in the code below, clients should ignore pixelFormatType
|
||||
"timestamp": 337791
|
||||
},
|
||||
"camera": {
|
||||
"cameraIntrinsics": [3x3 matrix],
|
||||
fx 0 px
|
||||
0 fy py
|
||||
0 0 1
|
||||
fx and fy are the focal length in pixels.
|
||||
px and py are the coordinates of the principal point in pixels.
|
||||
The origin is at the center of the upper-left pixel.
|
||||
|
||||
"cameraImageResolution": {
|
||||
"width": 1280,
|
||||
"height": 720
|
||||
},
|
||||
"viewMatrix": [4x4 camera view matrix],
|
||||
"interfaceOrientation": 3,
|
||||
// 0 UIDeviceOrientationUnknown
|
||||
// 1 UIDeviceOrientationPortrait
|
||||
// 2 UIDeviceOrientationPortraitUpsideDown
|
||||
// 3 UIDeviceOrientationLandscapeRight
|
||||
// 4 UIDeviceOrientationLandscapeLeft
|
||||
"projectionMatrix": [4x4 camera projection matrix]
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
var intensity = 0.0;
|
||||
var cr = -1;
|
||||
var cb = -1;
|
||||
|
||||
averageIntensity = function (buffer) {
|
||||
var w = buffer.size.width;
|
||||
var h = buffer.size.height;
|
||||
var pad = buffer.size.bytesPerRow - w;
|
||||
var pixels = new Uint8Array(buffer.buffer);
|
||||
|
||||
intensity = 0.0;
|
||||
var p = 0;
|
||||
for (var r = 0; r < h; r++) {
|
||||
var v = 0;
|
||||
for (var i = 0; i < w; i++) {
|
||||
if (p < pixels.length) {
|
||||
v += pixels[p++]
|
||||
} else {
|
||||
console.error("overflow pixel buffer")
|
||||
}
|
||||
}
|
||||
intensity += v / w;
|
||||
p += pad;
|
||||
}
|
||||
intensity = (intensity / h) / 255.0;
|
||||
}
|
||||
|
||||
colorAtCenter = function(buffer) {
|
||||
var w = buffer.size.width;
|
||||
var h = buffer.size.height;
|
||||
var pixels = new Uint8Array(buffer.buffer);
|
||||
|
||||
var cx = Math.floor(w / 2) * buffer.size.bytesPerPixel
|
||||
var cy = Math.floor(h / 2)
|
||||
var p = cy * buffer.size.bytesPerRow + cx;
|
||||
cb = pixels[p++];
|
||||
cr = pixels[p];
|
||||
}
|
||||
|
||||
self.addEventListener('message', function(event){
|
||||
var frame = event.data.frame
|
||||
var camera = event.data.camera
|
||||
switch (frame.pixelFormat) {
|
||||
case "YUV420P":
|
||||
this.averageIntensity(frame.buffers[0])
|
||||
this.colorAtCenter(frame.buffers[1])
|
||||
|
||||
// pass the buffers back or they will be garbage collected
|
||||
var buffers = frame.buffers
|
||||
var buffs = []
|
||||
for (var i = 0; i < buffers.length; i++) {
|
||||
buffs.push(buffers[i].buffer)
|
||||
}
|
||||
|
||||
postMessage ({intensity: intensity, cr: cr, cb: cb, buffers: buffs, frame: frame}, buffs);
|
||||
}
|
||||
});
|
||||
|
||||
// setInterval( function(){
|
||||
// console.log("Help me!")
|
||||
// self.postMessage (Math.random() * 255.0);
|
||||
//}, 500);
|
||||
// need to figure out how to make loadScripts() work in BlobURLs
|
||||
</script>
|
||||
<script>
|
||||
// RAINBOW Candy of a Certain Name colors
|
||||
|
@@ -223,26 +108,30 @@
	}

	newSession() {
		var blob = new Blob([
			document.querySelector('#worker1').textContent
		], { type: "text/javascript" })
		// var blob = new Blob([
		// document.querySelector('#worker1').textContent
		// ], { type: "text/javascript" })
		// this.worker = new Worker(window.URL.createObjectURL(blob));

		this.worker = new Worker("worker.js")

		this.worker = new Worker(window.URL.createObjectURL(blob));

		var self = this;
		this.worker.onmessage = function(ev) {
			var videoFrame = XRVideoFrame.createFromMessage(ev)
			self.intensity = ev.data.intensity;
			self.cr = ev.data.cr;
			self.cb = ev.data.cb;
			self.handleVisionDone(ev.data.frame, ev.data.buffers);
			self.handleVisionDone(videoFrame);
		}

		this.worker.addEventListener('error', (e) => {
			console.log("worker error:" + e)
		})

		//this.setVideoWorker(this.worker);
		this.setVideoWorker(ev => { this.handleVideoFrame(ev) })
		this.setVideoWorker(this.worker);

		// use this instead to not use worker.
		//this.setVideoWorker(ev => { this.handleVideoFrame(ev) })
	}

	// Called during construction
@@ -292,7 +181,7 @@
	}


	handleVisionDone(frame, buffers) {
	handleVisionDone(videoFrame) {
		var txt = "ARKit Light Estimate: " + this.lightEstimate.toFixed(2) + "<br>CV Average Intensity: " + this.intensity.toFixed(2)
			+ "<br>Center R/B: " + this.cr.toFixed(2) + " / " + this.cb.toFixed(2) + "<br><center>";
@@ -311,8 +200,9 @@

		this.messageText = txt;

		if (makeTexUV) {
			var buffer = frame.buffers[1];
		// in the worker case, if we don't access the buffer in the worker, it won't be pulled back
		if (makeTexUV && videoFrame.buffer(1).buffer) {
			var buffer = videoFrame.buffer(1);
			var buff = buffer.buffer;
			if (this.texSize != (buff.byteLength /2 *3)) {
				this.texSize = buff.byteLength /2 * 3
@@ -332,7 +222,9 @@
			this.uvTexture.needsUpdate = true;
		}
		updateCVFPS();
		this.requestVideoFrame(buffers);

		videoFrame.release();
		this.requestVideoFrame();
	}

	//////
@@ -374,21 +266,14 @@
	}

	handleVideoFrame(ev) {
		var frame = ev.detail.frame
		var camera = ev.detail.camera
		switch (frame.pixelFormat) {
			case "YUV420P":
				this.averageIntensity(frame.buffers[0])
				this.colorAtCenter(frame.buffers[1])
		var videoFrame = ev.detail
		switch (videoFrame.pixelFormat) {
			case XRVideoFrame.IMAGEFORMAT_YUV420P:
				this.averageIntensity(videoFrame.buffer(0))
				this.colorAtCenter(videoFrame.buffer(1))
		}

		// pass the buffers back or they will be garbage collected
		var buffers = frame.buffers
		var buffs = []
		for (var i = 0; i < buffers.length; i++) {
			buffs.push(buffers[i].buffer)
		}
		this.handleVisionDone(frame, buffers);
		this.handleVisionDone(videoFrame);
	}

}
@ -0,0 +1,250 @@
|
|||
/*
|
||||
Copyright (c) 2011, Daniel Guerrero
|
||||
All rights reserved.
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL DANIEL GUERRERO BE LIABLE FOR ANY
|
||||
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Uses the new array typed in javascript to binary base64 encode/decode
|
||||
* at the moment just decodes a binary base64 encoded
|
||||
* into either an ArrayBuffer (decodeArrayBuffer)
|
||||
* or into an Uint8Array (decode)
|
||||
*
|
||||
* References:
|
||||
* https://developer.mozilla.org/en/JavaScript_typed_arrays/ArrayBuffer
|
||||
* https://developer.mozilla.org/en/JavaScript_typed_arrays/Uint8Array
|
||||
*/
|
||||
|
||||
var Base64Binary = {
|
||||
_keyStr : "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",
|
||||
|
||||
/* will return a Uint8Array type */
|
||||
decodeArrayBuffer: function(input, buffer) {
|
||||
var bytes = (input.length/4) * 3;
|
||||
if (!buffer || buffer.byteLength != bytes) {
|
||||
// replace the buffer with a new, appropriately sized one
|
||||
buffer = new ArrayBuffer(bytes);
|
||||
}
|
||||
this.decode(input, buffer);
|
||||
|
||||
return buffer;
|
||||
},
|
||||
|
||||
|
||||
decode: function(input, arrayBuffer) {
|
||||
//get last chars to see if are valid
|
||||
var lkey1 = this._keyStr.indexOf(input.charAt(input.length-1));
|
||||
var lkey2 = this._keyStr.indexOf(input.charAt(input.length-2));
|
||||
|
||||
var bytes = (input.length/4) * 3;
|
||||
if (lkey1 == 64) bytes--; //padding chars, so skip
|
||||
if (lkey2 == 64) bytes--; //padding chars, so skip
|
||||
|
||||
var uarray;
|
||||
var chr1, chr2, chr3;
|
||||
var enc1, enc2, enc3, enc4;
|
||||
var i = 0;
|
||||
var j = 0;
|
||||
|
||||
if (arrayBuffer)
|
||||
uarray = new Uint8Array(arrayBuffer);
|
||||
else
|
||||
uarray = new Uint8Array(bytes);
|
||||
|
||||
input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "");
|
||||
|
||||
for (i=0; i<bytes; i+=3) {
|
||||
//get the 3 octects in 4 ascii chars
|
||||
enc1 = this._keyStr.indexOf(input.charAt(j++));
|
||||
enc2 = this._keyStr.indexOf(input.charAt(j++));
|
||||
enc3 = this._keyStr.indexOf(input.charAt(j++));
|
||||
enc4 = this._keyStr.indexOf(input.charAt(j++));
|
||||
|
||||
chr1 = (enc1 << 2) | (enc2 >> 4);
|
||||
chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
|
||||
chr3 = ((enc3 & 3) << 6) | enc4;
|
||||
|
||||
uarray[i] = chr1;
|
||||
if (enc3 != 64) uarray[i+1] = chr2;
|
||||
if (enc4 != 64) uarray[i+2] = chr3;
|
||||
}
|
||||
|
||||
return uarray;
|
||||
}
|
||||
}
|
||||
|
||||
//importScripts('webxr-worker.js');
|
||||
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
|
||||
|
||||
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
|
||||
|
||||
// store unused ArrayBuffers
|
||||
// we'll push to it on release, pop from it when we need a new one. In the common case, where the
|
||||
// same camera setup is running and the same cv is running, we should get some speed up by not reallocating
|
||||
// because the same size and number of buffers will be pushed/popped in the same order
|
||||
var _ab = [];
|
||||
|
||||
var XRVideoFrame = function () {
|
||||
function XRVideoFrame(buffers, pixelFormat, timestamp, camera) {
|
||||
_classCallCheck(this, XRVideoFrame);
|
||||
|
||||
this._buffers = buffers;
|
||||
this._pixelFormat = pixelFormat;
|
||||
this._timestamp = timestamp;
|
||||
this._camera = camera;
|
||||
}
|
||||
|
||||
_createClass(XRVideoFrame, [{
|
||||
key: "numBuffers",
|
||||
value: function numBuffers() {
|
||||
this._buffers.length;
|
||||
}
|
||||
}, {
|
||||
key: "buffer",
|
||||
value: function buffer(index) {
|
||||
if (index >= 0 && index < this._buffers.length) {
|
||||
var buff = this._buffers[index];
|
||||
if (typeof buff.buffer == "string") {
|
||||
var bufflen = buff.buffer.length;
|
||||
buff.buffer = Base64Binary.decodeArrayBuffer(buff.buffer, _ab.length > 0 ? _ab.pop() : null);
|
||||
var buffersize = buff.buffer.byteLength;
|
||||
var imagesize = buff.size.height * buff.size.bytesPerRow;
|
||||
}
|
||||
return buff;
|
||||
}
|
||||
}
|
||||
}, {
|
||||
key: "release",
|
||||
value: function release() {
|
||||
// if buffers are passed in, check if they are ArrayBuffers, and if so, save
|
||||
// them for possible use on the next frame.
|
||||
//
|
||||
// we do this because passing buffers down into Workers invalidates them, so we need to
|
||||
// return them here when we get them back from the Worker, so they can be reused.
|
||||
var buffers = this._buffers;
|
||||
for (var i = 0; i < buffers.length; i++) {
|
||||
if (buffers[i].buffer instanceof ArrayBuffer) {
|
||||
_ab.push(buffers[i].buffer);
|
||||
}
|
||||
}
|
||||
}
|
||||
}, {
|
||||
key: "postMessageToWorker",
|
||||
value: function postMessageToWorker(worker, options) {
|
||||
var msg = Object.assign({}, options || {});
|
||||
msg.buffers = this._buffers;
|
||||
msg.timestamp = this._timestamp;
|
||||
msg.pixelFormat = this._pixelFormat;
|
||||
msg.camera = this._camera;
|
||||
|
||||
var buffs = [];
|
||||
for (var i = 0; i < msg.buffers.length; i++) {
|
||||
if (msg.buffers[i].buffer instanceof ArrayBuffer) {
|
||||
buffs.push(msg.buffers[i].buffer);
|
||||
}
|
||||
}
|
||||
worker.postMessage(msg, buffs);
|
||||
}
|
||||
}, {
|
||||
key: "postReplyMessage",
|
||||
value: function postReplyMessage(options) {
|
||||
var msg = Object.assign({}, options);
|
||||
msg.buffers = this._buffers;
|
||||
msg.timestamp = this._timestamp;
|
||||
msg.pixelFormat = this._pixelFormat;
|
||||
msg.camera = this._camera;
|
||||
|
||||
var buffs = [];
|
||||
for (var i = 0; i < msg.buffers.length; i++) {
|
||||
if (msg.buffers[i].buffer instanceof ArrayBuffer) {
|
||||
// any array buffers should be marked for transfer
|
||||
buffs.push(msg.buffers[i].buffer);
|
||||
} else {
|
||||
// if we passed in a string, and it didn't get accessed, we shouldn't pass it back out
|
||||
msg.buffers.buffer[i] = null;
|
||||
}
|
||||
}
|
||||
postMessage(msg, buffs);
|
||||
}
|
||||
}, {
|
||||
key: "pixelFormat",
|
||||
get: function get() {
|
||||
return this._pixelFormat;
|
||||
}
|
||||
}, {
|
||||
key: "timestamp",
|
||||
get: function get() {
|
||||
return this._timestamp;
|
||||
}
|
||||
}, {
|
||||
key: "camera",
|
||||
get: function get() {
|
||||
return this._camera;
|
||||
}
|
||||
}], [{
|
||||
key: "createFromMessage",
|
||||
value: function createFromMessage(event) {
|
||||
return new this(event.data.buffers, event.data.pixelFormat, event.data.timestamp, event.data.camera);
|
||||
}
|
||||
}]);
|
||||
|
||||
return XRVideoFrame;
|
||||
}();
|
||||
/*
|
||||
ImageFormat taken from
|
||||
https://w3c.github.io/mediacapture-worker/#imagebitmap-extensions
|
||||
|
||||
enum ImageFormat {
|
||||
"RGBA32",
|
||||
"BGRA32",
|
||||
"RGB24",
|
||||
"BGR24",
|
||||
"GRAY8",
|
||||
"YUV444P",
|
||||
"YUV422P",
|
||||
"YUV420P",
|
||||
"YUV420SP_NV12",
|
||||
"YUV420SP_NV21",
|
||||
"HSV",
|
||||
"Lab",
|
||||
"DEPTH",
|
||||
// empty string
|
||||
""
|
||||
};
|
||||
|
||||
|
||||
*/
|
||||
|
||||
|
||||
XRVideoFrame.IMAGEFORMAT_RGBA32 = "RGBA32";
|
||||
XRVideoFrame.IMAGEFORMAT_BGRA32 = "BGRA32";
|
||||
XRVideoFrame.IMAGEFORMAT_RGB24 = "RGB24";
|
||||
XRVideoFrame.IMAGEFORMAT_BGR24 = "BGR24";
|
||||
XRVideoFrame.IMAGEFORMAT_GRAY8 = "GRAY8";
|
||||
XRVideoFrame.IMAGEFORMAT_YUV444P = "YUV444P";
|
||||
XRVideoFrame.IMAGEFORMAT_YUV422P = "YUV422P";
|
||||
XRVideoFrame.IMAGEFORMAT_YUV420P = "YUV420P";
|
||||
XRVideoFrame.IMAGEFORMAT_YUV420SP_NV12 = "YUV420SP_NV12";
|
||||
XRVideoFrame.IMAGEFORMAT_YUV420SP_NV21 = "YUV420SP_NV21";
|
||||
XRVideoFrame.IMAGEFORMAT_HSV = "HSV";
|
||||
XRVideoFrame.IMAGEFORMAT_Lab = "Lab";
|
||||
XRVideoFrame.IMAGEFORMAT_DEPTH = "DEPTH";
|
||||
XRVideoFrame.IMAGEFORMAT_NULL = "";
|
||||
|
||||
XRVideoFrame.IMAGEFORMAT = [XRVideoFrame.IMAGEFORMAT_RGBA32, XRVideoFrame.IMAGEFORMAT_BGRA32, XRVideoFrame.IMAGEFORMAT_RGB24, XRVideoFrame.IMAGEFORMAT_BGR24, XRVideoFrame.IMAGEFORMAT_GRAY8, XRVideoFrame.IMAGEFORMAT_YUV444P, XRVideoFrame.IMAGEFORMAT_YUV422P, XRVideoFrame.IMAGEFORMAT_YUV420P, XRVideoFrame.IMAGEFORMAT_YUV420SP_NV12, XRVideoFrame.IMAGEFORMAT_YUV420SP_NV21, XRVideoFrame.IMAGEFORMAT_HSV, XRVideoFrame.IMAGEFORMAT_Lab, XRVideoFrame.IMAGEFORMAT_DEPTH, XRVideoFrame.IMAGEFORMAT_NULL];
|
|
@ -0,0 +1,119 @@
|
|||
importScripts('webxr-worker.js')
|
||||
//importScripts('../../dist/webxr-worker.js')
|
||||
/**
|
||||
* In the video callback, ev.detail contains:
|
||||
{
|
||||
"frame": {
|
||||
"buffers": [ // Array of base64 encoded string buffers
|
||||
{
|
||||
"size": {
|
||||
"width": 320,
|
||||
"height": 180,
|
||||
"bytesPerRow": 320,
|
||||
"bytesPerPixel": 1
|
||||
},
|
||||
"buffer": "e3x...d7d" /// convert to Uint8 ArrayBuffer in code below
|
||||
},
|
||||
{
|
||||
"size": {
|
||||
"width": 160,
|
||||
"height": 90,
|
||||
"bytesPerRow": 320,
|
||||
"bytesPerPixel": 2
|
||||
},
|
||||
"buffer": "ZZF.../fIJ7" /// convert to Uint8 ArrayBuffer in code below
|
||||
}
|
||||
],
|
||||
"pixelFormatType": "kCVPixelFormatType_420YpCbCr8BiPlanarFullRange",
|
||||
"pixelFormat": "YUV420P", /// Added in the code below, clients should ignore pixelFormatType
|
||||
"timestamp": 337791
|
||||
},
|
||||
"camera": {
|
||||
"cameraIntrinsics": [3x3 matrix],
|
||||
fx 0 px
|
||||
0 fy py
|
||||
0 0 1
|
||||
fx and fy are the focal length in pixels.
|
||||
px and py are the coordinates of the principal point in pixels.
|
||||
The origin is at the center of the upper-left pixel.
|
||||
|
||||
"cameraImageResolution": {
|
||||
"width": 1280,
|
||||
"height": 720
|
||||
},
|
||||
"viewMatrix": [4x4 camera view matrix],
|
||||
"interfaceOrientation": 3,
|
||||
// 0 UIDeviceOrientationUnknown
|
||||
// 1 UIDeviceOrientationPortrait
|
||||
// 2 UIDeviceOrientationPortraitUpsideDown
|
||||
// 3 UIDeviceOrientationLandscapeRight
|
||||
// 4 UIDeviceOrientationLandscapeLeft
|
||||
"projectionMatrix": [4x4 camera projection matrix]
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
|
||||
var intensity = 0.0;
|
||||
var cr = -1;
|
||||
var cb = -1;
|
||||
|
||||
averageIntensity = function (buffer) {
|
||||
var w = buffer.size.width;
|
||||
var h = buffer.size.height;
|
||||
var pad = buffer.size.bytesPerRow - w;
|
||||
var pixels = new Uint8Array(buffer.buffer);
|
||||
|
||||
intensity = 0.0;
|
||||
var p = 0;
|
||||
for (var r = 0; r < h; r++) {
|
||||
var v = 0;
|
||||
for (var i = 0; i < w; i++) {
|
||||
if (p < pixels.length) {
|
||||
v += pixels[p++]
|
||||
} else {
|
||||
console.error("overflow pixel buffer")
|
||||
}
|
||||
}
|
||||
intensity += v / w;
|
||||
p += pad;
|
||||
}
|
||||
intensity = (intensity / h) / 255.0;
|
||||
}
|
||||
|
||||
colorAtCenter = function(buffer) {
|
||||
var w = buffer.size.width;
|
||||
var h = buffer.size.height;
|
||||
var pixels = new Uint8Array(buffer.buffer);
|
||||
|
||||
var cx = Math.floor(w / 2) * buffer.size.bytesPerPixel
|
||||
var cy = Math.floor(h / 2)
|
||||
var p = cy * buffer.size.bytesPerRow + cx;
|
||||
cb = pixels[p++];
|
||||
cr = pixels[p];
|
||||
}
|
||||
|
||||
self.addEventListener('message', function(event){
|
||||
var videoFrame = XRVideoFrame.createFromMessage(event);
|
||||
|
||||
switch (videoFrame.pixelFormat) {
|
||||
case XRVideoFrame.IMAGEFORMAT_YUV420P:
|
||||
this.averageIntensity(videoFrame.buffer(0))
|
||||
this.colorAtCenter(videoFrame.buffer(1))
|
||||
|
||||
// // pass the buffers back or they will be garbage collected
|
||||
// var buffers = frame.buffers
|
||||
// var buffs = []
|
||||
// for (var i = 0; i < buffers.length; i++) {
|
||||
// buffs.push(buffers[i].buffer)
|
||||
// }
|
||||
// postMessage ({intensity: intensity, cr: cr, cb: cb, buffers: buffs, frame: frame}, buffs);
|
||||
videoFrame.postReplyMessage({intensity: intensity, cr: cr, cb: cb})
|
||||
}
|
||||
videoFrame.release();
|
||||
});
|
||||
|
||||
// setInterval( function(){
|
||||
// console.log("Help me!")
|
||||
// self.postMessage (Math.random() * 255.0);
|
||||
//}, 500);
|
|
@@ -59,7 +59,7 @@ export default class XRDisplay extends EventHandlerBase {
		})
	}

	_requestVideoFrame(buffers) {}
	_requestVideoFrame() {}

	_requestAnimationFrame(callback){
		return window.requestAnimationFrame(callback)
@@ -16,6 +16,7 @@ import XRCoordinateSystem from './XRCoordinateSystem.js'
import XRViewPose from './XRViewPose.js'
import XRLayer from './XRLayer.js'
import XRWebGLLayer from './XRWebGLLayer.js'
import XRVideoFrame from './XRVideoFrame.js'

import EventHandlerBase from './fill/EventHandlerBase.js'
import FlatDisplay from './display/FlatDisplay.js'
@@ -50,6 +51,7 @@ class XRPolyfill extends EventHandlerBase {
		window.XRViewPose = XRViewPose
		window.XRLayer = XRLayer
		window.XRWebGLLayer = XRWebGLLayer
		window.XRVideoFrame = XRVideoFrame

		this._getVRDisplaysFinished = false;
@@ -74,20 +74,21 @@ export default class XRSession extends EventHandlerBase {
		if (callback instanceof Worker) {
			var worker = callback;
			callback = (ev => {
				var cv = ev.detail
				var buffers = cv.frame.buffers
				var buffs = []
				for (var i = 0; i < buffers.length; i++) {
					buffs.push(buffers[i].buffer)
				}
				worker.postMessage(cv, buffs);
				// var cv = ev.detail
				// var buffers = cv.frame.buffers
				// var buffs = []
				// for (var i = 0; i < buffers.length; i++) {
				// buffs.push(buffers[i].buffer)
				// }
				// worker.postMessage(cv, buffs);
				ev.detail.postMessageToWorker(worker)
			})
		}
		this._display.addEventListener("videoFrame", callback)
	}

	requestVideoFrame(buffers) {
		this._display._requestVideoFrame(buffers);
	requestVideoFrame() {
		this._display._requestVideoFrame();
	}

	_createPresentationFrame(){
@ -0,0 +1,213 @@
|
|||
import * as glMatrix from "./fill/gl-matrix/common.js";
|
||||
import * as mat4 from "./fill/gl-matrix/mat4.js";
|
||||
import * as quat from "./fill/gl-matrix/quat.js";
|
||||
import * as vec3 from "./fill/gl-matrix/vec3.js";
|
||||
import base64 from "./fill/base64-binary.js";
|
||||
|
||||
/*
|
||||
XRVideoFrame represents the a video frame from a camera.
|
||||
*/
|
||||
|
||||
/*
|
||||
ARKit WebXR Viewer current injects this structure:
|
||||
{
|
||||
"frame": {
|
||||
"buffers": [ // Array of base64 encoded string buffers
|
||||
{
|
||||
"size": {
|
||||
"width": 320,
|
||||
"height": 180,
|
||||
"bytesPerRow": 320,
|
||||
"bytesPerPixel": 1
|
||||
},
|
||||
"buffer": "e3x...d7d" /// convert to Uint8 buffer in code below
|
||||
},
|
||||
{
|
||||
"size": {
|
||||
"width": 160,
|
||||
"height": 90,
|
||||
"bytesPerRow": 320,
|
||||
"bytesPerPixel": 2
|
||||
},
|
||||
"buffer": "ZZF.../fIJ7" /// convert to Uint8 buffer in code below
|
||||
}
|
||||
],
|
||||
"pixelFormatType": "kCVPixelFormatType_420YpCbCr8BiPlanarFullRange",
|
||||
"pixelFormat": "YUV420P", /// Added in the code below, clients should ignore pixelFormatType
|
||||
"timestamp": 337791
|
||||
},
|
||||
"camera": {
|
||||
"cameraIntrinsics": [3x3 matrix],
|
||||
fx 0 px
|
||||
0 fy py
|
||||
0 0 1
|
||||
fx and fy are the focal length in pixels.
|
||||
px and py are the coordinates of the principal point in pixels.
|
||||
The origin is at the center of the upper-left pixel.
|
||||
|
||||
"cameraImageResolution": {
|
||||
"width": 1280,
|
||||
"height": 720
|
||||
},
|
||||
"viewMatrix": [4x4 camera view matrix],
|
||||
"interfaceOrientation": 3,
|
||||
// 0 UIDeviceOrientationUnknown
|
||||
// 1 UIDeviceOrientationPortrait
|
||||
// 2 UIDeviceOrientationPortraitUpsideDown
|
||||
// 3 UIDeviceOrientationLandscapeRight
|
||||
// 4 UIDeviceOrientationLandscapeLeft
|
||||
"projectionMatrix": [4x4 camera projection matrix]
|
||||
}
|
||||
}
|
||||
|
||||
We'll just pass in frame and buffer.
|
||||
|
||||
frame.buffers.buffer[*] can be String (which will be lazily converted to ArrayBuffer) or ArrayBuffer.
|
||||
frame.pixelFormatType will be ignored
|
||||
pixelFormat should be one of XRVideoFrame.IMAGEFORMAT
|
||||
|
||||
*/
|
||||
|
||||
// store unused ArrayBuffers
|
||||
// we'll push to it on release, pop from it when we need a new one. In the common case, where the
|
||||
// same camera setup is running and the same cv is running, we should get some speed up by not reallocating
|
||||
// because the same size and number of buffers will be pushed/popped in the same order
|
||||
var _ab = []
|
||||
|
||||
export default class XRVideoFrame {
|
||||
constructor(buffers, pixelFormat, timestamp, camera){
|
||||
this._buffers = buffers
|
||||
this._pixelFormat = pixelFormat
|
||||
this._timestamp = timestamp
|
||||
this._camera = camera
|
||||
}
|
||||
|
||||
static createFromMessage (event) {
|
||||
return new this(event.data.buffers, event.data.pixelFormat, event.data.timestamp, event.data.camera)
|
||||
}
|
||||
|
||||
numBuffers() {this._buffers.length}
|
||||
|
||||
buffer(index) {
|
||||
if (index >= 0 && index < this._buffers.length) {
|
||||
var buff = this._buffers[index]
|
||||
if (typeof buff.buffer == "string") {
|
||||
var bufflen = buff.buffer.length;
|
||||
buff.buffer = base64.decodeArrayBuffer(buff.buffer, _ab.length > 0 ? _ab.pop() : null);
|
||||
var buffersize = buff.buffer.byteLength;
|
||||
var imagesize = buff.size.height * buff.size.bytesPerRow;
|
||||
}
|
||||
return buff;
|
||||
}
|
||||
}
|
||||
|
||||
get pixelFormat(){ return this._pixelFormat }
|
||||
get timestamp(){ return this._timestamp }
|
||||
get camera(){ return this._camera }
|
||||
|
||||
release () {
|
||||
// if buffers are passed in, check if they are ArrayBuffers, and if so, save
|
||||
// them for possible use on the next frame.
|
||||
//
|
||||
// we do this because passing buffers down into Workers invalidates them, so we need to
|
||||
// return them here when we get them back from the Worker, so they can be reused.
|
||||
var buffers = this._buffers;
|
||||
for (var i=0; i< buffers.length; i++) {
|
||||
if (buffers[i].buffer instanceof ArrayBuffer) {
|
||||
_ab.push(buffers[i].buffer)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
postMessageToWorker (worker, options) {
|
||||
var msg = Object.assign({}, options || {})
|
||||
msg.buffers = this._buffers
|
||||
msg.timestamp = this._timestamp
|
||||
msg.pixelFormat = this._pixelFormat
|
||||
msg.camera = this._camera
|
||||
|
||||
var buffs = []
|
||||
for (var i = 0; i < msg.buffers.length; i++) {
|
||||
if (msg.buffers[i].buffer instanceof ArrayBuffer) {
|
||||
buffs.push(msg.buffers[i].buffer)
|
||||
}
|
||||
}
|
||||
worker.postMessage(msg, buffs);
|
||||
}
|
||||
|
||||
postReplyMessage (options) {
|
||||
var msg = Object.assign({}, options)
|
||||
msg.buffers = this._buffers
|
||||
msg.timestamp = this._timestamp
|
||||
msg.pixelFormat = this._pixelFormat
|
||||
msg.camera = this._camera
|
||||
|
||||
var buffs = []
|
||||
for (var i = 0; i < msg.buffers.length; i++) {
|
||||
if (msg.buffers[i].buffer instanceof ArrayBuffer) {
|
||||
// any array buffers should be marked for transfer
|
||||
buffs.push(msg.buffers[i].buffer)
|
||||
} else {
|
||||
// if we passed in a string, and it didn't get accessed, we shouldn't pass it back out
|
||||
msg.buffers.buffer[i] = null
|
||||
}
|
||||
}
|
||||
postMessage(msg, buffs);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
ImageFormat taken from
|
||||
https://w3c.github.io/mediacapture-worker/#imagebitmap-extensions
|
||||
|
||||
enum ImageFormat {
|
||||
"RGBA32",
|
||||
"BGRA32",
|
||||
"RGB24",
|
||||
"BGR24",
|
||||
"GRAY8",
|
||||
"YUV444P",
|
||||
"YUV422P",
|
||||
"YUV420P",
|
||||
"YUV420SP_NV12",
|
||||
"YUV420SP_NV21",
|
||||
"HSV",
|
||||
"Lab",
|
||||
"DEPTH",
|
||||
// empty string
|
||||
""
|
||||
};
|
||||
|
||||
|
||||
*/
|
||||
XRVideoFrame.IMAGEFORMAT_RGBA32 = "RGBA32"
|
||||
XRVideoFrame.IMAGEFORMAT_BGRA32 = "BGRA32"
|
||||
XRVideoFrame.IMAGEFORMAT_RGB24 = "RGB24"
|
||||
XRVideoFrame.IMAGEFORMAT_BGR24 = "BGR24"
|
||||
XRVideoFrame.IMAGEFORMAT_GRAY8 = "GRAY8"
|
||||
XRVideoFrame.IMAGEFORMAT_YUV444P = "YUV444P"
|
||||
XRVideoFrame.IMAGEFORMAT_YUV422P = "YUV422P"
|
||||
XRVideoFrame.IMAGEFORMAT_YUV420P = "YUV420P"
|
||||
XRVideoFrame.IMAGEFORMAT_YUV420SP_NV12 = "YUV420SP_NV12"
|
||||
XRVideoFrame.IMAGEFORMAT_YUV420SP_NV21 = "YUV420SP_NV21"
|
||||
XRVideoFrame.IMAGEFORMAT_HSV = "HSV"
|
||||
XRVideoFrame.IMAGEFORMAT_Lab = "Lab"
|
||||
XRVideoFrame.IMAGEFORMAT_DEPTH = "DEPTH"
|
||||
XRVideoFrame.IMAGEFORMAT_NULL = ""
|
||||
|
||||
XRVideoFrame.IMAGEFORMAT = [
|
||||
XRVideoFrame.IMAGEFORMAT_RGBA32,
|
||||
XRVideoFrame.IMAGEFORMAT_BGRA32,
|
||||
XRVideoFrame.IMAGEFORMAT_RGB24,
|
||||
XRVideoFrame.IMAGEFORMAT_BGR24,
|
||||
XRVideoFrame.IMAGEFORMAT_GRAY8,
|
||||
XRVideoFrame.IMAGEFORMAT_YUV444P,
|
||||
XRVideoFrame.IMAGEFORMAT_YUV422P,
|
||||
XRVideoFrame.IMAGEFORMAT_YUV420P,
|
||||
XRVideoFrame.IMAGEFORMAT_YUV420SP_NV12,
|
||||
XRVideoFrame.IMAGEFORMAT_YUV420SP_NV21,
|
||||
XRVideoFrame.IMAGEFORMAT_HSV,
|
||||
XRVideoFrame.IMAGEFORMAT_Lab,
|
||||
XRVideoFrame.IMAGEFORMAT_DEPTH,
|
||||
XRVideoFrame.IMAGEFORMAT_NULL
|
||||
]
|
|
@@ -0,0 +1,27 @@
import XRAnchor from './XRAnchor.js'
import XRAnchorOffset from './XRAnchorOffset.js'
import XRCoordinateSystem from './XRCoordinateSystem.js'
import XRViewPose from './XRViewPose.js'
import XRVideoFrame from './XRVideoFrame.js'
import EventHandlerBase from './fill/EventHandlerBase.js'

/*
XRPolyfill implements the window.XR functionality as a polyfill

Code below will check for window.XR and if it doesn't exist will install this polyfill,
so you can safely include this script in any page.
*/
export default class XRWorkerPolyfill extends EventHandlerBase {
	constructor(){
		super()
		self.XRAnchor = XRAnchor
		self.XRAnchorOffset = XRAnchorOffset
		self.XRView = XRView
		self.XRCoordinateSystem = XRCoordinateSystem
		self.XRViewPose = XRViewPose
		self.XRVideoFrame = XRVideoFrame
	}
}

/* Install XRWorkerPolyfill if self.XR does not exist */
WorkerGlobalScope.XR = new XRWorkerPolyfill()
@@ -214,8 +214,8 @@ export default class FlatDisplay extends XRDisplay {
		}
	}

	_requestVideoFrame(buffers) {
		this._arKitWrapper._requestComputerVisionData(buffers)
	_requestVideoFrame(bufers) {
		this._arKitWrapper._requestComputerVisionData()
	}

	_createSession(parameters=null){
@@ -41,24 +41,24 @@ export default class ARKitWrapper extends EventHandlerBase {
		this._rawARData = null

		// worker to convert buffers
		var blobURL = this._buildWorkerBlob()
		this._worker = new Worker(blobURL);
		URL.revokeObjectURL(blobURL);
		// var blobURL = this._buildWorkerBlob()
		// this._worker = new Worker(blobURL);
		// URL.revokeObjectURL(blobURL);

		var self = this;
		this._worker.onmessage = function (ev) {
			setTimeout(function () {
				self.dispatchEvent(
					new CustomEvent(
						ARKitWrapper.COMPUTER_VISION_DATA,
						{
							source: self,
							detail: ev.data
						}
					)
				)
			})
		}
		// var self = this;
		// this._worker.onmessage = function (ev) {
		// setTimeout(function () {
		// self.dispatchEvent(
		// new CustomEvent(
		// ARKitWrapper.COMPUTER_VISION_DATA,
		// {
		// source: self,
		// detail: ev.data
		// }
		// )
		// )
		// })
		// }

		this.lightIntensity = 1000;
		/**
@@ -99,7 +99,7 @@ export default class ARKitWrapper extends EventHandlerBase {
		}

		// temp storage for CV arraybuffers
		this._ab = []
		//this._ab = []

		// Set up some named global methods that the ARKit to JS bridge uses and send out custom events when they are called
		let eventCallbacks = [
@ -907,41 +907,43 @@ export default class ARKitWrapper extends EventHandlerBase {
|
|||
// convert the arrays
|
||||
if (!detail) {
|
||||
console.error("detail passed to _onComputerVisionData is null")
|
||||
this._requestComputerVisionData()
|
||||
return;
|
||||
}
|
||||
// convert the arrays
|
||||
if (!detail.frame || !detail.frame.buffers || detail.frame.buffers.length <= 0) {
|
||||
console.error("detail passed to _onComputerVisionData is bad, no buffers")
|
||||
this._requestComputerVisionData()
|
||||
return;
|
||||
}
|
||||
|
||||
// convert buffers in place
|
||||
var buffers = detail.frame.buffers;
|
||||
//var buffers = detail.frame.buffers;
|
||||
|
||||
// if there are too many cached array buffers, drop the unneeded ones
|
||||
if (this._ab.length > buffers.length) {
|
||||
this._ab = this._ab.slice(0, buffer.length)
|
||||
}
|
||||
// if (this._ab.length > buffers.length) {
|
||||
// this._ab = this._ab.slice(0, buffer.length)
|
||||
// }
|
||||
|
||||
if (this._worker) {
|
||||
detail.ab = this._ab;
|
||||
if (this._ab) {
|
||||
this._worker.postMessage(detail, this._ab);
|
||||
} else {
|
||||
this._worker.postMessage(detail);
|
||||
}
|
||||
} else {
|
||||
for (var i = 0; i < buffers.length; i++) {
|
||||
// gradually increase the size of the ab[] array to hold the temp buffers,
|
||||
// and add null so it gets allocated properly
|
||||
if (this._ab.length <= i) {
|
||||
this._ab.push(null)
|
||||
}
|
||||
var bufflen = buffers[i].buffer.length;
|
||||
this._ab[i] = buffers[i].buffer = base64.decodeArrayBuffer(buffers[i].buffer, this._ab[i]);
|
||||
var buffersize = buffers[i].buffer.byteLength;
|
||||
var imagesize = buffers[i].size.height * buffers[i].size.bytesPerRow;
|
||||
}
|
||||
// if (this._worker) {
|
||||
// detail.ab = this._ab;
|
||||
// if (this._ab) {
|
||||
// this._worker.postMessage(detail, this._ab);
|
||||
// } else {
|
||||
// this._worker.postMessage(detail);
|
||||
// }
|
||||
// } else {
|
||||
// for (var i = 0; i < buffers.length; i++) {
|
||||
// // gradually increase the size of the ab[] array to hold the temp buffers,
|
||||
// // and add null so it gets allocated properly
|
||||
// if (this._ab.length <= i) {
|
||||
// this._ab.push(null)
|
||||
// }
|
||||
// var bufflen = buffers[i].buffer.length;
|
||||
// this._ab[i] = buffers[i].buffer = base64.decodeArrayBuffer(buffers[i].buffer, this._ab[i]);
|
||||
// var buffersize = buffers[i].buffer.byteLength;
|
||||
// var imagesize = buffers[i].size.height * buffers[i].size.bytesPerRow;
|
||||
// }
|
||||
switch(detail.frame.pixelFormatType) {
|
||||
case "kCVPixelFormatType_420YpCbCr8BiPlanarFullRange":
|
||||
detail.frame.pixelFormat = "YUV420P";
|
||||
|
@ -951,148 +953,137 @@ export default class ARKitWrapper extends EventHandlerBase {
|
|||
break;
|
||||
}
|
||||
|
||||
var xrVideoFrame = new XRVideoFrame(detail.frame.buffers, detail.frame.pixelFormat, detail.frame.timestamp, detail.camera )
|
||||
this.dispatchEvent(
|
||||
new CustomEvent(
|
||||
ARKitWrapper.COMPUTER_VISION_DATA,
|
||||
{
|
||||
source: this,
|
||||
detail: detail
|
||||
detail: xrVideoFrame
|
||||
}
|
||||
)
|
||||
)
|
||||
}
|
||||
//}
|
||||
}
|
||||
|
||||
/*
|
||||
Requests ARKit a new set of buffers for computer vision processing
|
||||
*/
|
||||
_requestComputerVisionData(buffers) {
|
||||
if (buffers) {
|
||||
this._ab = [];
|
||||
// if buffers are passed in, check if they are ArrayBuffers, and if so, save
|
||||
// them for possible use on the next frame.
|
||||
//
|
||||
// we do this because passing buffers down into Workers invalidates them, so we need to
|
||||
// return them here when we get them back from the Worker, so they can be reused.
|
||||
for (var i=0; i< buffers.length; i++) {
|
||||
if (buffers[i] instanceof ArrayBuffer) {
|
||||
this._ab.push(buffers[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
_requestComputerVisionData() {
|
||||
window.webkit.messageHandlers.requestComputerVisionData.postMessage({})
|
||||
}
|
||||
|
||||
|
||||
_buildWorkerBlob() {
|
||||
var blobURL = URL.createObjectURL( new Blob([ '(',
|
||||
// _buildWorkerBlob() {
|
||||
// var blobURL = URL.createObjectURL( new Blob([ '(',
|
||||
|
||||
function(){
|
||||
// could not get workers working, so am not using this.
|
||||
//
|
||||
// Tried to use Transferable ArrayBuffers but kept getting DOM Error 25.
|
||||
//
|
||||
// function(){
|
||||
// // could not get workers working, so am not using this.
|
||||
// //
|
||||
// // Tried to use Transferable ArrayBuffers but kept getting DOM Error 25.
|
||||
// //
|
||||
|
||||
var b64 = {
|
||||
_keyStr: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",
|
||||
// var b64 = {
|
||||
// _keyStr: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",
|
||||
|
||||
/* will return a Uint8Array type */
|
||||
decodeArrayBuffer: function(input, buffer) {
|
||||
var bytes = (input.length/4) * 3;
|
||||
if (!buffer || buffer.byteLength != bytes) {
|
||||
// replace the buffer with a new, appropriately sized one
|
||||
buffer = new ArrayBuffer(bytes);
|
||||
}
|
||||
this.decode(input, buffer);
|
||||
// /* will return a Uint8Array type */
|
||||
// decodeArrayBuffer: function(input, buffer) {
|
||||
// var bytes = (input.length/4) * 3;
|
||||
// if (!buffer || buffer.byteLength != bytes) {
|
||||
// // replace the buffer with a new, appropriately sized one
|
||||
// buffer = new ArrayBuffer(bytes);
|
||||
// }
|
||||
// this.decode(input, buffer);
|
||||
|
||||
return buffer;
|
||||
},
|
||||
// return buffer;
|
||||
// },
|
||||
|
||||
removePaddingChars: function(input){
|
||||
var lkey = this._keyStr.indexOf(input.charAt(input.length - 1));
|
||||
if(lkey == 64){
|
||||
return input.substring(0,input.length - 1);
|
||||
}
|
||||
return input;
|
||||
},
|
||||
// removePaddingChars: function(input){
|
||||
// var lkey = this._keyStr.indexOf(input.charAt(input.length - 1));
|
||||
// if(lkey == 64){
|
||||
// return input.substring(0,input.length - 1);
|
||||
// }
|
||||
// return input;
|
||||
// },
|
||||
|
||||
decode: function(input, arrayBuffer) {
|
||||
//get last chars to see if are valid
|
||||
input = this.removePaddingChars(input);
|
||||
input = this.removePaddingChars(input);
|
||||
// decode: function(input, arrayBuffer) {
|
||||
// //get last chars to see if are valid
|
||||
// input = this.removePaddingChars(input);
|
||||
// input = this.removePaddingChars(input);
|
||||
|
||||
var bytes = parseInt((input.length / 4) * 3, 10);
|
||||
// var bytes = parseInt((input.length / 4) * 3, 10);
|
||||
|
||||
var uarray;
|
||||
var chr1, chr2, chr3;
|
||||
var enc1, enc2, enc3, enc4;
|
||||
var i = 0;
|
||||
var j = 0;
|
||||
// var uarray;
|
||||
// var chr1, chr2, chr3;
|
||||
// var enc1, enc2, enc3, enc4;
|
||||
// var i = 0;
|
||||
// var j = 0;
|
||||
|
||||
if (arrayBuffer)
|
||||
uarray = new Uint8Array(arrayBuffer);
|
||||
else
|
||||
uarray = new Uint8Array(bytes);
|
||||
// if (arrayBuffer)
|
||||
// uarray = new Uint8Array(arrayBuffer);
|
||||
// else
|
||||
// uarray = new Uint8Array(bytes);
|
||||
|
||||
input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "");
|
||||
// input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "");
|
||||
|
||||
for (i=0; i<bytes; i+=3) {
|
||||
//get the 3 octects in 4 ascii chars
|
||||
enc1 = this._keyStr.indexOf(input.charAt(j++));
|
||||
enc2 = this._keyStr.indexOf(input.charAt(j++));
|
||||
enc3 = this._keyStr.indexOf(input.charAt(j++));
|
||||
enc4 = this._keyStr.indexOf(input.charAt(j++));
|
||||
// for (i=0; i<bytes; i+=3) {
|
||||
// //get the 3 octects in 4 ascii chars
|
||||
// enc1 = this._keyStr.indexOf(input.charAt(j++));
|
||||
// enc2 = this._keyStr.indexOf(input.charAt(j++));
|
||||
// enc3 = this._keyStr.indexOf(input.charAt(j++));
|
||||
// enc4 = this._keyStr.indexOf(input.charAt(j++));
|
||||
|
||||
chr1 = (enc1 << 2) | (enc2 >> 4);
|
||||
chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
|
||||
chr3 = ((enc3 & 3) << 6) | enc4;
|
||||
// chr1 = (enc1 << 2) | (enc2 >> 4);
|
||||
// chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
|
||||
// chr3 = ((enc3 & 3) << 6) | enc4;
|
||||
|
||||
uarray[i] = chr1;
|
||||
if (enc3 != 64) uarray[i+1] = chr2;
|
||||
if (enc4 != 64) uarray[i+2] = chr3;
|
||||
}
|
||||
// uarray[i] = chr1;
|
||||
// if (enc3 != 64) uarray[i+1] = chr2;
|
||||
// if (enc4 != 64) uarray[i+2] = chr3;
|
||||
// }
|
||||
|
||||
return uarray;
|
||||
}
|
||||
}
|
||||
// return uarray;
|
||||
// }
|
||||
// }
|
||||
|
||||
self.addEventListener('message', function(event){
|
||||
var frame = event.data.frame
|
||||
var camera = event.data.camera
|
||||
// self.addEventListener('message', function(event){
|
||||
// var frame = event.data.frame
|
||||
// var camera = event.data.camera
|
||||
|
||||
var ab = event.data.ab;
|
||||
// var ab = event.data.ab;
|
||||
|
||||
// convert buffers in place
|
||||
var buffers = frame.buffers;
|
||||
var buffs = []
|
||||
// if there are too many cached array buffers, drop the unneeded ones
|
||||
if (ab.length > buffers.length) {
|
||||
ab = ab.slice(0, buffer.length)
|
||||
}
|
||||
for (var i = 0; i < buffers.length; i++) {
|
||||
// gradually increase the size of the ab[] array to hold the temp buffers,
|
||||
// and add null so it gets allocated properly
|
||||
if (ab.length <= i) {
|
||||
ab.push(null)
|
||||
}
|
||||
ab[i] = buffers[i].buffer = b64.decodeArrayBuffer(buffers[i].buffer, ab[i]);
|
||||
buffs.push(buffers[i].buffer)
|
||||
}
|
||||
switch(frame.pixelFormatType) {
|
||||
case "kCVPixelFormatType_420YpCbCr8BiPlanarFullRange":
|
||||
frame.pixelFormat = "YUV420P";
|
||||
break;
|
||||
default:
|
||||
frame.pixelFormat = frame.pixelFormatType;
|
||||
break;
|
||||
}
|
||||
// // convert buffers in place
|
||||
// var buffers = frame.buffers;
|
||||
// var buffs = []
|
||||
// // if there are too many cached array buffers, drop the unneeded ones
|
||||
// if (ab.length > buffers.length) {
|
||||
// ab = ab.slice(0, buffer.length)
|
||||
// }
|
||||
// for (var i = 0; i < buffers.length; i++) {
|
||||
// // gradually increase the size of the ab[] array to hold the temp buffers,
|
||||
// // and add null so it gets allocated properly
|
||||
// if (ab.length <= i) {
|
||||
// ab.push(null)
|
||||
// }
|
||||
// ab[i] = buffers[i].buffer = b64.decodeArrayBuffer(buffers[i].buffer, ab[i]);
|
||||
// buffs.push(buffers[i].buffer)
|
||||
// }
|
||||
// switch(frame.pixelFormatType) {
|
||||
// case "kCVPixelFormatType_420YpCbCr8BiPlanarFullRange":
|
||||
// frame.pixelFormat = "YUV420P";
|
||||
// break;
|
||||
// default:
|
||||
// frame.pixelFormat = frame.pixelFormatType;
|
||||
// break;
|
||||
// }
|
||||
|
||||
postMessage(event.data, buffs);
|
||||
});
|
||||
}.toString(),
|
||||
')()' ], { type: 'application/javascript' } ) )
|
||||
// postMessage(event.data, buffs);
|
||||
// });
|
||||
// }.toString(),
|
||||
// ')()' ], { type: 'application/javascript' } ) )
|
||||
|
||||
return( blobURL );
|
||||
}
|
||||
// return( blobURL );
|
||||
// }
|
||||
|
||||
}
|
||||
|
||||
|
|
|
@@ -1,6 +1,6 @@
const path = require('path');

module.exports = {
var xrPolyfill = {
	entry: './polyfill/XRPolyfill.js',
	output: {
		filename: 'webxr-polyfill.js',
@@ -26,3 +26,32 @@ module.exports = {
		extensions: ['.js']
	}
};

var xrVideoWorker = {
	entry: './polyfill/XRWorkerPolyfill.js',
	output: {
		filename: 'webxr-worker.js',
		path: path.resolve(__dirname, 'dist')
	},
	module: {
		rules: [
			{
				test: /\.js$/,
				include: [
					path.resolve(__dirname, "polyfill"),
				],
				use: {
					loader: 'babel-loader',
					options: {
						presets: ['env']
					}
				}
			}
		]
	},
	resolve: {
		extensions: ['.js']
	}
};

module.exports = [xrPolyfill, xrVideoWorker]