first working checkin of new CV version

with opencv sample
Blair MacIntyre 2018-04-05 12:20:16 -04:00
Parent 712a6e666e
Commit 2dd8c1a736
10 changed files with 483 additions and 200 deletions

View file

@@ -50,33 +50,36 @@
<script type="module" src="../../polyfill/XRPolyfill.js"></script>
<script nomodule src="../../dist/webxr-polyfill.js"></script>
<script>
var cvStatusTxt = "";
// var Module = {
// preRun: [function() {
// Module.FS_createPreloadedFile('/', 'haarcascade_eye.xml', 'haarcascade_eye.xml', true, false);
// Module.FS_createPreloadedFile('/', 'haarcascade_frontalface_default.xml', 'haarcascade_frontalface_default.xml', true, false);
// Module.FS_createPreloadedFile('/', 'haarcascade_profileface.xml', 'haarcascade_profileface.xml', true, false);
// }],
// onRuntimeInitialized: function() {
// opencvIsReady();
// },
// setStatus: function(msg) {
// cvStatusTxt = msg;
// }
// };
var cvStatusTxt = "";
// Needed if you want to run the OpenCV code inside the web page. Doesn't hurt to have it here
// otherwise.
var Module = {
preRun: [function() {
Module.FS_createPreloadedFile('/', 'haarcascade_eye.xml', 'haarcascade_eye.xml', true, false);
Module.FS_createPreloadedFile('/', 'haarcascade_frontalface_default.xml', 'haarcascade_frontalface_default.xml', true, false);
Module.FS_createPreloadedFile('/', 'haarcascade_profileface.xml', 'haarcascade_profileface.xml', true, false);
}],
onRuntimeInitialized: function() {
opencvIsReady();
},
setStatus: function(msg) {
cvStatusTxt = msg;
}
};
</script>
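For reference: Module.FS_createPreloadedFile(parent, name, url, canRead, canWrite) is Emscripten's hook for fetching a file before the runtime starts and mounting it at parent/name in the virtual filesystem. That is what later lets OpenCV open the cascades by bare filename, roughly like this sketch (the classifier setup is illustrative, mirroring worker.js, not code from this commit):
var face_cascade = new cv.CascadeClassifier();
// the filename resolves against the Emscripten virtual FS populated in preRun
face_cascade.load('haarcascade_frontalface_default.xml');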
<!--script src="opencv.js"></script-->
<script src="../common.js"></script>
</head>
<body>
<!-- video frame -->
<canvas style="transform: translate(-50%, 50%);opacity:0.5;position:absolute;bottom:50%;left:50%;border:1px solid green" id="video_canvas" width="320" height="240"> </canvas>
<img src="target-28139_64.png" class="crosshair" />
<!-- place to render over the video frame -->
<canvas style="transform: translate(-50%, 0);opacity:0.5;position:absolute;border:1px solid green" id="video_canvas" width="100%" height="100%"> </canvas>
<div id="target" />
<div onclick="hideMe(this)" id="description">
<h2>Simple Computer Vision</h2>
<h2>OpenCV Face Tracking Demo</h2>
<h5>(click to dismiss)</h5>
<p>Compute the average intensity of the video image pixels.</p>
<p>Use OpenCV to find faces and draw a box around them in 2D.</p>
</div>
<script id="worker1" type="javascript/worker">
</script>
@@ -99,16 +102,26 @@
beginTime = time;
}
var cvStartTime = 0;
var cvAfterMatTime = 0;
var cvAfterResizeTime = 0;
var cvEndTime = 0;
var cvMatTime = 0;
var cvFaceTime = 0
var cvResizeTime = 0;
var cvIdleTime = 0;
// flag to set true if you want to construct a texture from the UV image
var makeTexUV = false;
var makeTexY = false;
// has openCV loaded?
var openCVready = false;
var showCVImage = false;
var cvImageDiv = document.getElementById("video_canvas");
var cvImageCtx = cvImageDiv.getContext('2d');
document.body.appendChild( stats.dom );
@@ -128,6 +141,12 @@
this.rotatedImage = null;
this.face_cascade = null;
this.eye_cascade = null;
this.triggerResize = true;
window.addEventListener('resize', () => {
this.triggerResize = true;
})
}
newSession() {
@@ -138,24 +157,56 @@
this.worker = new Worker ("worker.js")
var self = this;
this.worker.onmessage = function(ev) {
this.worker.onmessage = (ev) => {
switch (ev.data.type) {
case "cvFrame":
var videoFrame = XRVideoFrame.createFromMessage(ev)
self.faceRects = ev.data.faceRects;
for (let i = 0; i < self.faceRects.length; i++) {
let rect = self.faceRects[i];
cvImageDiv.context.strokeRect(rect.x, rect.y, rect.width , rect.height);
this.faceRects = ev.data.faceRects;
cvEndTime = ev.data.time;
cvFaceTime = cvEndTime - cvAfterResizeTime;
var rotation = videoFrame.camera.cameraOrientation;
var buffer = videoFrame.buffer(0)
var width = buffer.size.width
var height = buffer.size.height
if (this.triggerResize || this.rotation != rotation) {
this.triggerResize = false;
this.rotation = rotation;
this.adjustRenderCanvasSize(rotation, width, height)
}
self.handleVisionDone(videoFrame);
cvImageCtx.clearRect(0, 0, cvImageDiv.width, cvImageDiv.height);
for (let i = 0; i < this.faceRects.length; i++) {
let rect = this.faceRects[i];
cvImageCtx.strokeRect(rect.x, rect.y, rect.width , rect.height);
}
this.handleVisionDone(videoFrame);
updateCVFPS();
// pass the buffers back or they will be garbage collected
videoFrame.release();
break;
case "cvStart":
// request the next one when the old one finishes
this.requestVideoFrame();
cvStartTime = ev.data.time;
if (cvEndTime > 0) {
cvIdleTime = cvStartTime - cvEndTime;
}
break
case "cvAfterMat":
cvAfterMatTime = ev.data.time;
cvMatTime = cvAfterMatTime - cvStartTime
break;
case "cvAfterResize":
cvAfterResizeTime = ev.data.time;
cvResizeTime = cvAfterResizeTime - cvAfterMatTime
break;
case "cvReady":
console.log('OpenCV.js is ready');
openCVready = true
@@ -190,6 +241,40 @@
// })
}
adjustRenderCanvasSize (rotation, width, height) {
var cameraAspect;
if(rotation == 90 || rotation == -90) {
cameraAspect = height / width;
cvImageDiv.width = height
cvImageDiv.height = width
} else {
cameraAspect = width / height;
cvImageDiv.width = width
cvImageDiv.height = height
}
// reposition to DIV
var windowWidth = this.session.baseLayer.framebufferWidth;
var windowHeight = this.session.baseLayer.framebufferHeight;
var windowAspect = windowWidth / windowHeight;
var translateX = 0;
var translateY = 0;
if (cameraAspect > windowAspect) {
windowWidth = windowHeight * cameraAspect;
translateX = -(windowWidth - this.session.baseLayer.framebufferWidth)/2;
} else {
windowHeight = windowWidth / cameraAspect;
translateY = -(windowHeight - this.session.baseLayer.framebufferHeight)/2;
}
cvImageDiv.style.width = windowWidth.toFixed(2) + 'px'
cvImageDiv.style.height = windowHeight.toFixed(2) + 'px'
cvImageDiv.style.transform = "translate(" + translateX.toFixed(2) + "px, "+ translateY.toFixed(2) + "px)"
}
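For reference, a standalone sketch of the cover-fit math adjustRenderCanvasSize implements above (function name and sample numbers are illustrative, not from the commit):
// Scale a camera image so it covers the window, centering the overflow:
// match heights when the camera is wider than the window, match widths
// otherwise, then translate by half the overflow.
function coverFit(camW, camH, winW, winH) {
    var cameraAspect = camW / camH;
    var windowAspect = winW / winH;
    var w = winW, h = winH, tx = 0, ty = 0;
    if (cameraAspect > windowAspect) {
        w = winH * cameraAspect;
        tx = -(w - winW) / 2;
    } else {
        h = winW / cameraAspect;
        ty = -(h - winH) / 2;
    }
    return { width: w, height: h, translateX: tx, translateY: ty };
}
// e.g. a 320x240 image on a 375x667 window:
// coverFit(320, 240, 375, 667) -> { width: 889.33, height: 667, translateX: -257.17, translateY: 0 }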
// Called during construction
initializeScene(){
// make and display an image of the UV image buffer
@@ -232,14 +317,31 @@
this.lightEstimate = frame.lightEstimate || 0;
stats.update()
var txt = "<center>OpenCV Status: " + cvStatusTxt;
txt += "<br>ARKit Light Estimate: " + this.lightEstimate.toFixed(2);
txt += "<br>Face: ";
if (this.faceRects.length > 0) {
txt += "[" + this.faceRects.toString + "]"
var txt = "<center>"
txt += "ARKit Light Estimate: " + this.lightEstimate.toFixed(2);
txt += "<br>" ;
if (cvStatusTxt.length > 0) {
txt += "OpenCV: " + cvStatusTxt + "<br>"
} else {
txt += "NO FACE"
txt += "<br>"
}
if (openCVready) {
txt += "Looking for faces: "
if (this.faceRects.length > 0) {
txt += "found " + this.faceRects.length.toString() + " faces and/or eyes"
} else {
txt += "NO FACE"
}
txt += "<br>timing (idle / createMat / resize / detect:<br> "
txt += cvIdleTime.toFixed(2)
txt += " " + (cvMatTime).toFixed(2)
txt += " " + (cvResizeTime).toFixed(2)
txt += " " + (cvFaceTime).toFixed(2)
} else {
txt += "(Initializing OpenCV)"
}
txt += "</center>"
this.messageText = txt;
@@ -391,40 +493,22 @@
return rects
}
// 0 UIDeviceOrientationUnknown
// 1 UIDeviceOrientationPortrait
// 2 UIDeviceOrientationPortraitUpsideDown
// 3 UIDeviceOrientationLandscapeRight
// 4 UIDeviceOrientationLandscapeLeft --- normal?
//
rotateImage(rotation, buffer) {
var width = buffer.size.width
var height = buffer.size.height
if (!this.rotatedImage || (this.rotation != rotation)) {
this.rotation = rotation;
var cameraAspect;
if(rotation ==1 || rotation == 2) {
this.rotatedImage = new cv.Mat(width, height, cv.CV_8U)
cameraAspect = height / width;
} else {
this.rotatedImage = new cv.Mat(height, width, cv.CV_8U)
cameraAspect = width / height;
if (this.triggerResize || !this.rotatedImage || (this.rotation != rotation)) {
this.triggerResize = false;
if (!this.rotatedImage || (this.rotation != rotation)) {
this.rotation = rotation;
if(rotation == 90 || rotation == -90) {
this.rotatedImage = new cv.Mat(width, height, cv.CV_8U)
} else {
this.rotatedImage = new cv.Mat(height, width, cv.CV_8U)
}
}
// reposition to DIV
var windowWidth = this.session.baseLayer.framebufferWidth;
var windowHeight = this.session.baseLayer.framebufferHeight;
var windowAspect = windowWidth / windowHeight;
if (cameraAspect > windowAspect) {
windowWidth = windowHeight * cameraAspect;
} else {
windowHeight = windowWidth / cameraAspect;
}
var cvTxt ="transform: translate(-50%, 50%);opacity:0.5;position:absolute;bottom:50%;left:50%;border:1px solid green; width:"+ windowWidth + "px;height:" + windowHeight + "px";
console.log("update CV canvas style to: " + cvTxt)
cvImageDiv.style = cvTxt;
this.adjustRenderCanvasSize(rotation, width, height)
}
var src, dest;
src = dest = 0;
@@ -435,7 +519,7 @@
var rowExtra = buffer.size.bytesPerPixel * buffer.size.bytesPerRow - width;
switch(rotation) {
case 1:
case -90:
// clockwise
dest = height - 1;
for (j = 0; j < height; j++) {
@@ -449,7 +533,7 @@
}
break;
case 2:
case 90:
// anticlockwise
dest = width * (height - 1);
for (j = 0; j < height; j++) {
@@ -463,7 +547,7 @@
}
break;
case 4:
case 180:
// 180
dest = width * height - 1;
for (j = 0; j < height; j++) {
@@ -475,7 +559,7 @@
break;
case 3:
default:
default: // if it's not one of the 4 cardinal rotations, do nothing, sorry!
// copy
for (j = 0; j < height; j++) {
for (var i = 0; i < width; i++) {
@@ -492,14 +576,16 @@
var camera = videoFrame.camera
switch (videoFrame.pixelFormat) {
case XRVideoFrame.IMAGEFORMAT_YUV420P:
var rotation = camera.interfaceOrientation;
// first, rotate the image such that it is oriented correctly relative to the display
var rotation = camera.cameraOrientation;
this.rotateImage(rotation, videoFrame.buffer(0))
this.faceRects = this.faceDetect(this.rotatedImage);
}
if (!showCVImage) cvImageDiv.context.clearRect(0, 0, cvImageDiv.width, cvImageDiv.height);
if (!showCVImage) cvImageCtx.clearRect(0, 0, cvImageDiv.width, cvImageDiv.height);
for (let i = 0; i < this.faceRects.length; i++) {
let rect = this.faceRects[i];
cvImageDiv.context.strokeRect(rect.x, rect.y, rect.width , rect.height);
cvImageCtx.strokeRect(rect.x, rect.y, rect.width , rect.height);
}
}
}

View file

@@ -2,14 +2,17 @@ importScripts('../../dist/webxr-worker.js')
console.log("loaded webxr-worker.js")
var cvStatusTxt = "";
var openCVready = false;
var Module = {
preRun: [function() {
Module.FS_createPreloadedFile('/', 'haarcascade_eye.xml', 'haarcascade_eye.xml', true, false);
Module.FS_createPreloadedFile('/', 'haarcascade_frontalface_default.xml', 'haarcascade_frontalface_default.xml', true, false);
Module.FS_createPreloadedFile('/', 'haarcascade_profileface.xml', 'haarcascade_profileface.xml', true, false);
console.log("CV preRun")
Module.FS_createPreloadedFile('./', 'haarcascade_eye.xml', 'haarcascade_eye.xml', true, false);
Module.FS_createPreloadedFile('./', 'haarcascade_frontalface_default.xml', 'haarcascade_frontalface_default.xml', true, false);
Module.FS_createPreloadedFile('./', 'haarcascade_profileface.xml', 'haarcascade_profileface.xml', true, false);
}],
onRuntimeInitialized: function() {
openCVready = true;
postMessage({type: "cvReady"});
},
setStatus: function(msg) {
@@ -25,55 +28,54 @@ console.log("loaded opencv.js:" + cv);
/**
* In the video callback, ev.detail contains:
{
"frame": {
"buffers": [ // Array of base64 encoded string buffers
{
"size": {
"width": 320,
"height": 180,
"bytesPerRow": 320,
"bytesPerPixel": 1
},
"buffer": "e3x...d7d" /// convert to Uint8 ArrayBuffer in code below
{
"frame": {
"buffers": [ // Array of base64 encoded string buffers
{
"size": {
"width": 320,
"height": 180,
"bytesPerRow": 320,
"bytesPerPixel": 1
},
{
"size": {
"width": 160,
"height": 90,
"bytesPerRow": 320,
"bytesPerPixel": 2
},
"buffer": "ZZF.../fIJ7" /// convert to Uint8 ArrayBuffer in code below
}
],
"pixelFormatType": "kCVPixelFormatType_420YpCbCr8BiPlanarFullRange",
"pixelFormat": "YUV420P", /// Added in the code below, clients should ignore pixelFormatType
"timestamp": 337791
},
"camera": {
"cameraIntrinsics": [3x3 matrix],
fx 0 px
0 fy py
0 0 1
fx and fy are the focal length in pixels.
px and py are the coordinates of the principal point in pixels.
The origin is at the center of the upper-left pixel.
"buffer": "e3x...d7d" /// convert to Uint8 buffer in code below
},
{
"size": {
"width": 160,
"height": 90,
"bytesPerRow": 320,
"bytesPerPixel": 2
},
"buffer": "ZZF.../fIJ7" /// convert to Uint8 buffer in code below
}
],
"pixelFormat": "YUV420P", /// Added in the code below, clients should ignore pixelFormatType
"timestamp": 337791
},
"camera": {
"cameraIntrinsics": [3x3 matrix],
fx 0 px
0 fy py
0 0 1
fx and fy are the focal length in pixels.
px and py are the coordinates of the principal point in pixels.
The origin is at the center of the upper-left pixel.
"cameraImageResolution": {
"width": 1280,
"height": 720
},
"viewMatrix": [4x4 camera view matrix],
"arCamera": true;
"cameraOrientation": 0, // orientation in degrees of image relative to display
// normally 0, but on video mixed displays that keep the camera in a fixed
// orientation, but rotate the UI, like on some phones, this will change
// as the display orientation changes
"projectionMatrix": [4x4 camera projection matrix]
}
}
"cameraImageResolution": {
"width": 1280,
"height": 720
},
"viewMatrix": [4x4 camera view matrix],
"interfaceOrientation": 3,
// 0 UIDeviceOrientationUnknown
// 1 UIDeviceOrientationPortrait
// 2 UIDeviceOrientationPortraitUpsideDown
// 3 UIDeviceOrientationLandscapeRight
// 4 UIDeviceOrientationLandscapeLeft
"projectionMatrix": [4x4 camera projection matrix]
}
}
*/
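Read concretely, a minimal worker-side consumer of that structure looks like this sketch (every call here appears in the real handler further down; only the logging is illustrative):
self.addEventListener('message', function (event) {
    var videoFrame = XRVideoFrame.createFromMessage(event);
    var y = videoFrame.buffer(0);   // for YUV420P, buffer 0 is the full-res luminance plane
    console.log('frame ' + y.size.width + 'x' + y.size.height +
                ', rotated ' + videoFrame.camera.cameraOrientation + ' degrees');
    videoFrame.release();           // hand the buffers back for reuse
});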
var face_cascade;
@@ -95,16 +97,18 @@ function loadEyesDetectTrainingSet() {
}
}
function faceDetect(img_gray) {
function faceDetect(img_gray, roiRect) {
loadFaceDetectTrainingSet();
let w = Math.floor(img_gray.cols /2);
let h = Math.floor(img_gray.rows /2);
let roiRect = new cv.Rect(w/2, h/2, w, h);
let roi_gray = img_gray.roi(roiRect);
var roi_gray = img_gray
if (roiRect) {
roi_gray = img_gray.roi(roiRect);
} else {
roiRect = new cv.Rect(0, 0, img_gray.cols, img_gray.rows);
}
let faces = new cv.RectVector();
let s1 = new cv.Size(50,50);
let s1 = new cv.Size();
let s2 = new cv.Size();
face_cascade.detectMultiScale(roi_gray, faces, 1.1, 30, 0, s1, s2);
@@ -113,13 +117,14 @@ function faceDetect(img_gray) {
for (let i = 0; i < faces.size(); i += 1) {
let faceRect = faces.get(i);
rects.push({
x: faceRect.x,
y: faceRect.y,
x: faceRect.x + roiRect.x,
y: faceRect.y + roiRect.y,
width: faceRect.width,
height: faceRect.height
});
}
if (roi_gray != img_gray) roi_gray.delete()
faces.delete();
return rects;
}
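A usage sketch of the new optional roiRect parameter: thanks to the roiRect.x/y offsets added above, detections run on a sub-region come back in full-image coordinates.
// Search only the center quarter of the image, as the old hard-coded version did:
let w = Math.floor(img_gray.cols / 2);
let h = Math.floor(img_gray.rows / 2);
let centerRects = faceDetect(img_gray, new cv.Rect(w / 2, h / 2, w, h));
// Or scan the whole frame:
let allRects = faceDetect(img_gray);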
@@ -176,21 +181,24 @@ function eyesDetect(img_gray) {
return rects
}
// createCVMat
//
// this routine does two things (if needed) as part of copying the input buffer to a cv.Mat:
// - rotates the image so it is upright
// - converts to greyscale
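The greyscale part rests on two facts, sketched here as standalone helpers (illustrative, not in this file): plane 0 of a YUV420P frame is already 8-bit luminance, while RGBA32 pixels are reduced with a plain channel average rather than a weighted luma formula.
function greyFromY(b, src) {
    return b[src];                                  // Y plane: already greyscale
}
function greyFromRGBA(b, src) {
    return (b[src] + b[src + 1] + b[src + 2]) / 3;  // average R,G,B; skip A
}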
var rotatedImage = null;
var lastRotation = -1;
// 0 UIDeviceOrientationUnknown
// 1 UIDeviceOrientationPortrait
// 2 UIDeviceOrientationPortraitUpsideDown
// 3 UIDeviceOrientationLandscapeRight
// 4 UIDeviceOrientationLandscapeLeft --- normal?
function rotateImage(rotation, buffer) {
function createCVMat(rotation, buffer, pixelFormat) {
var width = buffer.size.width
var height = buffer.size.height
if (!rotatedImage || (lastRotation != rotation)) {
lastRotation = rotation;
if (rotatedImage) rotatedImage.delete()
if(rotation ==1 || rotation == 2) {
if(rotation == 90 || rotation == -90) {
rotatedImage = new cv.Mat(width, height, cv.CV_8U)
} else {
rotatedImage = new cv.Mat(height, width, cv.CV_8U)
@@ -203,15 +211,26 @@ function rotateImage(rotation, buffer) {
var b = new Uint8Array(buffer.buffer);
var r = rotatedImage.data;
var rowExtra = buffer.size.bytesPerPixel * buffer.size.bytesPerRow - width;
var rowExtra = buffer.size.bytesPerRow - buffer.size.bytesPerPixel * width;
switch(rotation) {
case 1:
case -90:
// clockwise
dest = height - 1;
for (j = 0; j < height; j++) {
for (var i = 0; i < width; i++) {
r[dest] = b[src++]
dest += height; // up the row
switch(pixelFormat) {
case XRVideoFrame.IMAGEFORMAT_YUV420P:
for (var i = 0; i < width; i++) {
r[dest] = b[src++]
dest += height; // up the row
}
break;
case XRVideoFrame.IMAGEFORMAT_RGBA32:
for (var i = 0; i < width; i++) {
r[dest] = (b[src++] + b[src++] + b[src++]) / 3
src++
dest += height; // up the row
}
break;
}
dest -= width * height;
dest --;
@@ -219,13 +238,24 @@ function rotateImage(rotation, buffer) {
}
break;
case 2:
case 90:
// anticlockwise
dest = width * (height - 1);
for (j = 0; j < height; j++) {
for (var i = 0; i < width; i++) {
r[dest] = b[src++]
dest -= height; // down the row
switch(pixelFormat) {
case XRVideoFrame.IMAGEFORMAT_YUV420P:
for (var i = 0; i < width; i++) {
r[dest] = b[src++]
dest -= height; // down the row
}
break;
case XRVideoFrame.IMAGEFORMAT_RGBA32:
for (var i = 0; i < width; i++) {
r[dest] = (b[src++] + b[src++] + b[src++]) / 3
src++
dest -= height; // down the row
}
break;
}
dest += width * height;
dest ++;
@@ -233,40 +263,109 @@ function rotateImage(rotation, buffer) {
}
break;
case 4:
case 180:
// 180
dest = width * height - 1;
for (j = 0; j < height; j++) {
for (var i = 0; i < width; i++) {
r[dest--] = b[src++]
switch(pixelFormat) {
case XRVideoFrame.IMAGEFORMAT_YUV420P:
for (var i = 0; i < width; i++) {
r[dest--] = b[src++]
}
break;
case XRVideoFrame.IMAGEFORMAT_RGBA32:
for (var i = 0; i < width; i++) {
r[dest--] = (b[src++] + b[src++] + b[src++]) / 3
src++
}
break;
}
src += rowExtra;
}
break;
case 3:
case 0:
default:
// copy
for (j = 0; j < height; j++) {
for (var i = 0; i < width; i++) {
r[dest++] = b[src++]
switch(pixelFormat) {
case XRVideoFrame.IMAGEFORMAT_YUV420P:
for (var i = 0; i < width; i++) {
r[dest++] = b[src++]
}
break;
case XRVideoFrame.IMAGEFORMAT_RGBA32:
for (var i = 0; i < width; i++) {
r[dest++] = (b[src++] + b[src++] + b[src++]) / 3
src++
}
break;
}
src += rowExtra;
src += rowExtra;
}
}
}
return rotatedImage;
}
var endTime = 0;
self.addEventListener('message', function(event){
postMessage({type: "cvStart", time: ( performance || Date ).now()});
var videoFrame = XRVideoFrame.createFromMessage(event);
var faceRects = []
switch (videoFrame.pixelFormat) {
case XRVideoFrame.IMAGEFORMAT_YUV420P:
var rotation = camera.interfaceOrientation;
rotateImage(rotation, videoFrame.buffer(0))
var faceRects = faceDetect(rotatedImage);
if (openCVready) {
switch (videoFrame.pixelFormat) {
case XRVideoFrame.IMAGEFORMAT_YUV420P:
case XRVideoFrame.IMAGEFORMAT_RGBA32:
var scale = 1;
var buffer = videoFrame.buffer(0);
var width = buffer.size.width;
var height = buffer.size.height;
videoFrame.postReplyMessage({type: "cvFrame", rects: faceRects})
// let's pick a size such that the video is below 256 in size in both dimensions
while (width > 256 || height > 256) {
width = width / 2
height = height / 2
scale = scale / 2;
}
// first, rotate the image such that it is oriented correctly relative to the display
var rotation = videoFrame.camera.cameraOrientation;
var image = createCVMat(rotation, videoFrame.buffer(0), videoFrame.pixelFormat)
postMessage({type: "cvAfterMat", time: ( performance || Date ).now()});
if (scale != 1) {
var m = new cv.Mat()
cv.resize(image, m, new cv.Size(), scale, scale);
// let w = Math.floor(image.cols /2);
// let h = Math.floor(image.rows /2);
// let roiRect = new cv.Rect(w/2, h/2, w, h);
postMessage({type: "cvAfterResize", time: ( performance || Date ).now()});
// now find faces
faceRects = faceDetect(m);
for (let i = 0; i < faceRects.length; i++) {
let rect = faceRects[i];
rect.x = rect.x / scale
rect.y = rect.y / scale
rect.width = rect.width / scale
rect.height = rect.height / scale
}
m.delete();
} else {
postMessage({type: "cvAfterResize", time: ( performance || Date ).now()});
faceRects = faceDetect(image);
}
}
}
endTime = ( performance || Date ).now()
videoFrame.postReplyMessage({type: "cvFrame", faceRects: faceRects, time: endTime})
videoFrame.release();
});
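A worked example of the downscale-and-rescale flow above: a 1280x720 luminance plane halves three times (1280x720 -> 640x360 -> 320x180 -> 160x90) before both dimensions drop to 256 or below, leaving scale = 1/8; detection runs on the 160x90 image, and each rect is divided by scale on the way out, so a face found at (20, 10, 16, 16) is reported as (160, 80, 128, 128) in full-resolution coordinates.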

View file

@@ -185,13 +185,13 @@
handleVisionDone(videoFrame) {
var txt = "ARKit Light Estimate: " + this.lightEstimate.toFixed(2) + "<br>CV Average Intensity: " + this.intensity.toFixed(2)
+ "<br>Center RGB: " + this.cr.toFixed(2) + " / " + this.cg.toFixed(2) + " / " + this.cb.toFixed(2) + "<br><center>";
+ "<br>Center RGB: " + this.cr.toFixed(0) + " / " + this.cg.toFixed(0) + " / " + this.cb.toFixed(0) + "<br><center>";
for (var i=0; i<colors.length; i++) {
var c = colors[i];
c.dist = (c.cr - this.cr) * (c.cr - this.cr) + (c.cb - this.cb)*(c.cb - this.cb);
c.dist = Math.sqrt(c.dist + (c.cg - this.cg) * (c.cg - this.cg))
txt += c.dist.toFixed(1) + " "
txt += c.dist.toFixed(0) + " "
}
for (i=0; i<colors.length; i++) {
c = colors[i];

View file

@@ -48,7 +48,12 @@ export default class XRSession extends EventHandlerBase {
if(typeof callback !== 'function'){
throw 'Invalid callback'
}
var skip = false;
return this._display._requestAnimationFrame(() => {
if (skip) {
skip = false;
return;
}
const frame = this._createPresentationFrame()
this._display._reality._handleNewFrame(frame)
this._display._handleNewFrame(frame)
@@ -82,6 +87,7 @@ export default class XRSession extends EventHandlerBase {
// }
// worker.postMessage(cv, buffs);
ev.detail.postMessageToWorker(worker)
ev.detail.release()
})
}
this._display.addEventListener("videoFrame", callback)

View file

@@ -6,10 +6,7 @@ import base64 from "./fill/base64-binary.js";
/*
XRVideoFrame represents a video frame from a camera.
*/
/*
ARKit WebXR Viewer current injects this structure:
{
"frame": {
"buffers": [ // Array of base64 encoded string buffers
@@ -32,7 +29,6 @@ ARKit WebXR Viewer current injects this structure:
"buffer": "ZZF.../fIJ7" /// convert to Uint8 buffer in code below
}
],
"pixelFormatType": "kCVPixelFormatType_420YpCbCr8BiPlanarFullRange",
"pixelFormat": "YUV420P", /// Added in the code below, clients should ignore pixelFormatType
"timestamp": 337791
},
@@ -50,20 +46,16 @@ ARKit WebXR Viewer current injects this structure:
"height": 720
},
"viewMatrix": [4x4 camera view matrix],
"interfaceOrientation": 3,
// 0 UIDeviceOrientationUnknown
// 1 UIDeviceOrientationPortrait
// 2 UIDeviceOrientationPortraitUpsideDown
// 3 UIDeviceOrientationLandscapeRight
// 4 UIDeviceOrientationLandscapeLeft
"arCamera": true;
"cameraOrientation": 0, // orientation in degrees of image relative to display
// normally 0, but on video mixed displays that keep the camera in a fixed
// orientation, but rotate the UI, like on some phones, this will change
// as the display orientation changes
"projectionMatrix": [4x4 camera projection matrix]
}
}
We'll just pass in frame and buffer.
frame.buffers.buffer[*] can be String (which will be lazily converted to ArrayBuffer) or ArrayBuffer.
frame.pixelFormatType will be ignored
pixelFormat should be one of XRVideoFrame.IMAGEFORMAT
*/
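A minimal construction sketch following that contract (all values are placeholders; a real YUV420P frame would also carry the half-resolution interleaved CbCr plane shown in the comment above):
var frame = new XRVideoFrame(
    [{
        size: { width: 320, height: 180, bytesPerRow: 320, bytesPerPixel: 1 },
        buffer: new ArrayBuffer(320 * 180)   // or a base64 string, or ImageData
    }],
    XRVideoFrame.IMAGEFORMAT_YUV420P,
    performance.now(),
    {
        cameraIntrinsics: [1, 0, 160, 0, 1, 90, 0, 0, 1],
        cameraImageResolution: { width: 1280, height: 720 },
        viewMatrix: [1,0,0,0, 0,1,0,0, 0,0,1,0, 0,0,0,1],
        arCamera: false,
        cameraOrientation: 0,
        projectionMatrix: [1,0,0,0, 0,1,0,0, 0,0,1,0, 0,0,0,1]
    }
);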
@@ -81,6 +73,37 @@ export default class XRVideoFrame {
for (var i=0; i< buffers.length; i++) {
buffers[i]._buffer = buffers[i].buffer
buffers[i].buffer = null
// if we didn't pass in an abCache, as might happen when we pass this
// to/from a worker, see if there is a saved ArrayBuffer of the right size
if (!buffers[i]._abCache && typeof buffers[i]._buffer == "string") {
var bytes = base64.decodeLength(buffers[i]._buffer);
for (var j=0; j < _ab.length; j++) {
if (_ab[j].byteLength == bytes) {
buffers[i]._abCache = _ab[j]
_ab.splice(j, 1);
break;
}
}
} else if (!buffers[i]._abCache && buffers[i]._buffer instanceof ImageData) {
var data = buffers[i]._buffer.data
var bytes = data.length
for (var j=0; j < _ab.length; j++) {
if (_ab[j].byteLength == bytes) {
buffers[i]._abCache = _ab[j]
_ab.splice(j, 1);
break;
}
}
var ab = buffers[i]._abCache ? buffers[i]._abCache : new ArrayBuffer(bytes)
buffers[i]._abCache = null;
var buffData = new Uint8Array(ab);
for (var k = 0; k < bytes; k++) buffData[k] = data[k]
buffers[i]._buffer = ab
}
}
this._pixelFormat = pixelFormat
this._timestamp = timestamp
@@ -98,7 +121,9 @@ export default class XRVideoFrame {
var buff = this._buffers[index]
if (!buff.buffer) {
if (typeof buff._buffer == "string") {
buff._buffer = base64.decodeArrayBuffer(buff._buffer, _ab.length > 0 ? _ab.pop() : null);
// use the ArrayBuffer cache if there
buff._buffer = base64.decodeArrayBuffer(buff._buffer, buff._abCache);
buff._abCache = null;
buff.buffer = new Uint8Array(buff._buffer);
} else if (buff._buffer instanceof ArrayBuffer) {
buff.buffer = new Uint8Array(buff._buffer);
@@ -123,10 +148,13 @@ export default class XRVideoFrame {
// return them here when we get them back from the Worker, so they can be reused.
var buffers = this._buffers;
for (var i=0; i< buffers.length; i++) {
if (buffers[i]._buffer instanceof ArrayBuffer || buffers[i]._buffer instanceof ImageData) {
if (buffers[i]._buffer instanceof ArrayBuffer && buffers[i]._buffer.byteLength > 0) {
_ab.push(buffers[i]._buffer)
}
}
if (buffers[i]._abCache instanceof ArrayBuffer && buffers[i]._abCache.byteLength > 0) {
_ab.push(buffers[i]._abCache)
}
}
}
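The _ab array used in the constructor and here is a simple pool of transferable ArrayBuffers: buffers listed in postMessage(msg, buffs) are detached on the sending side, so they must round-trip and be re-pooled, or every frame allocates afresh. The pattern, reduced to a sketch (names hypothetical):
var pool = [];                                    // stands in for _ab
function takeBuffer(bytes) {
    for (var i = 0; i < pool.length; i++) {
        if (pool[i].byteLength == bytes) return pool.splice(i, 1)[0];
    }
    return new ArrayBuffer(bytes);                // pool miss: allocate
}
function recycleBuffer(ab) {
    if (ab instanceof ArrayBuffer && ab.byteLength > 0) pool.push(ab);
}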
postMessageToWorker (worker, options) {
@@ -139,12 +167,15 @@ export default class XRVideoFrame {
var buffs = []
for (var i = 0; i < msg.buffers.length; i++) {
msg.buffers[i].buffer = msg.buffers[i]._buffer;
if (msg.buffers[i]._buffer instanceof ArrayBuffer || msg.buffers[i]._buffer instanceof ImageData) {
buffs.push(msg.buffers[i]._buffer)
} else if (msg.buffers[i]._buffer instanceof ArrayBuffer || msg.buffers[i]._buffer instanceof ImageData) {
buffs.push(msg.buffers[i]._buffer)
}
msg.buffers[i]._buffer = null;
if (msg.buffers[i]._abCache instanceof ArrayBuffer) {
buffs.push(msg.buffers[i]._abCache)
}
}
worker.postMessage(msg, buffs);
}
@@ -162,11 +193,14 @@ export default class XRVideoFrame {
if (msg.buffers[i]._buffer instanceof ArrayBuffer || msg.buffers[i]._buffer instanceof ImageData) {
// any array buffers should be marked for transfer
buffs.push(msg.buffers[i]._buffer)
} else {
// if we passed in a string, and it didn't get accessed, we shouldn't pass it back out
msg.buffers[i]._buffer = null
msg.buffers[i].buffer = msg.buffers[i]._buffer
}
}
msg.buffers[i]._buffer = null
if (msg.buffers[i]._abCache instanceof ArrayBuffer) {
buffs.push(msg.buffers[i]._abCache)
}
}
postMessage(msg, buffs);
}
}

View file

@@ -4,7 +4,7 @@ Quaternion wraps a vector of length 4 used as an orientation value.
Taken from https://github.com/googlevr/webvr-polyfill/blob/master/src/math-util.js which took it from Three.js
*/
export default class Quaternion{
varructor(x=0, y=0, z=0, w=1){
constructor(x=0, y=0, z=0, w=1){
this.x = x
this.y = y
this.z = z

View file

@@ -4,7 +4,7 @@ Vector3 wraps a vector of length 3, often used as a position in 3D space.
Taken from https://github.com/googlevr/webvr-polyfill/blob/master/src/math-util.js which took it from Three.js
*/
export default class Vector3 {
varructor(x=0, y=0, z=0){
constructor(x=0, y=0, z=0){
this.x = x
this.y = y
this.z = z

View file

@@ -34,7 +34,10 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
export default class base64 {
static decodeLength(input) {
return (input.length/4) * 3;
}
/* will return a Uint8Array type */
static decodeArrayBuffer(input, buffer) {
var bytes = (input.length/4) * 3;
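Note that (input.length/4) * 3 is an upper bound: it ignores '=' padding, so it can overestimate the decoded size by one or two bytes. That is harmless here because both decodeLength and decodeArrayBuffer use the same estimate and the buffer pool matches on byteLength; a padding-aware variant, if exact sizes were ever needed, might look like this sketch:
static decodeLengthExact(input) {
    var padding = input.endsWith("==") ? 2 : (input.endsWith("=") ? 1 : 0);
    return (input.length / 4) * 3 - padding;
}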

View file

@@ -649,6 +649,7 @@ export default class ARKitWrapper extends EventHandlerBase {
// option to WebXRView is different than the WebXR option
if (newO.videoFrames) {
delete newO.videoFrames
newO.computer_vision_data = true;
}
@@ -915,6 +916,11 @@ export default class ARKitWrapper extends EventHandlerBase {
"height": 720
},
"viewMatrix": [4x4 camera view matrix],
"arCamera": true;
"cameraOrientation": 0, // orientation in degrees of image relative to display
// normally 0, but on video mixed displays that keep the camera in a fixed
// orientation, but rotate the UI, like on some phones, this will change
// as the display orientation changes
"interfaceOrientation": 3,
// 0 UIDeviceOrientationUnknown
// 1 UIDeviceOrientationPortrait
@@ -943,23 +949,33 @@ export default class ARKitWrapper extends EventHandlerBase {
// We need to add an orientation around z, so that we have the orientation that goes from
// camera frame to the current view orientation, since the camera is fixed and the view
// changes as we rotate the device.
//
// We also set a cameraOrientation value for the orientation of the camera relative to the
// display. This will be particular to video-mixed-AR where the camera is the video on the
// screen, since any other setup would need to use the full orientation (and probably
// wouldn't be rotating the content / UI)
detail.camera.arCamera = true;
var orientation = detail.camera.interfaceOrientation;
mat4.copy(this._mTemp, detail.camera.viewMatrix)
switch (orientation) {
case 1:
// rotate by -90;
detail.camera.cameraOrientation = -90;
mat4.multiply(detail.camera.viewMatrix, this._mTemp, this._m90neg)
break;
case 2:
// rotate by 90;
detail.camera.cameraOrientation = 90;
mat4.multiply(detail.camera.viewMatrix, this._mTemp, this._m90)
break;
case 3:
// rotate by nothing
detail.camera.cameraOrientation = 0;
// rotate by nothing
break;
case 4:
// rotate by 180;
detail.camera.cameraOrientation = 180;
mat4.multiply(detail.camera.viewMatrix, this._mTemp, this._m180)
break;
}
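In table form, the mapping this switch implements (UIDeviceOrientation values per the comment earlier in the file):
// interfaceOrientation        cameraOrientation   view-matrix fix-up
// 1 Portrait                  -90                 multiply by _m90neg
// 2 PortraitUpsideDown         90                 multiply by _m90
// 3 LandscapeRight              0                 none (camera-native)
// 4 LandscapeLeft              180                multiply by _m180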

View file

@@ -43,6 +43,8 @@ export default class CameraReality extends Reality {
// dealing with video frames from webrtc
this._sendingVideo = false;
this._sendVideoFrame = false;
this._videoProjectionMatrix = MatrixMath.mat4_generateIdentity();
this._videoViewMatrix = MatrixMath.mat4_generateIdentity();
this._lightEstimate = new XRLightEstimate();
@@ -75,9 +77,36 @@ export default class CameraReality extends Reality {
this._arCoreCanvas.width = window.innerWidth
this._arCoreCanvas.height = window.innerHeight
}
if (this._videoEl) {
this._adjustVideoSize();
}
}, false)
}
_adjustVideoSize () {
var windowWidth = this._xr._realityEls.clientWidth;
var windowHeight = this._xr._realityEls.clientHeight;
var windowAspect = windowWidth / windowHeight;
var canvasWidth = this._videoRenderWidth;
var canvasHeight = this._videoRenderHeight;
var cameraAspect = canvasWidth / canvasHeight;
var translateX = 0;
var translateY = 0;
if (cameraAspect > windowAspect) {
windowWidth = windowHeight * cameraAspect;
translateX = -(windowWidth - this._xr._realityEls.clientWidth)/2;
} else {
windowHeight = windowWidth / cameraAspect;
translateY = -(windowHeight - this._xr._realityEls.clientHeight)/2;
}
this._videoEl.style.width = windowWidth.toFixed(2) + 'px'
this._videoEl.style.height = windowHeight.toFixed(2) + 'px'
this._videoEl.style.transform = "translate(" + translateX.toFixed(2) + "px, "+ translateY.toFixed(2) + "px)"
}
/*
Called by a session before it hands a new XRPresentationFrame to the app
*/
@@ -100,10 +129,10 @@ export default class CameraReality extends Reality {
var data = imageData.data
var len = imageData.data.length
var buff = new ArrayBuffer(len)
var buffData = new Uint8Array(buff);
for (var i = 0; i < len; i++) buffData[i] = data[i]
// imageData = new ArrayBuffer(len)
// var buffData = new Uint8Array(imageData);
// for (var i = 0; i < len; i++) buffData[i] = data[i]
var buffers = [
{
size: {
@@ -112,23 +141,31 @@ export default class CameraReality extends Reality {
bytesPerRow: canvasWidth * 4,
bytesPerPixel: 4
},
buffer: buff
buffer: imageData
}];
var pixelFormat = XRVideoFrame.IMAGEFORMAT_RGBA32;
var timestamp = frame.timestamp;
// FIX.
// set from frame
var view = frame.views[0];
this._videoViewMatrix.set(view.viewMatrix);
this._videoProjectionMatrix.set(view.projectionMatrix)
var camera = {
cameraIntrinsics: [0, 0, 0, 0, 0, 0, 0, 0, 0],
arCamera: false,
cameraOrientation: 0,
cameraIntrinsics: [(this._videoEl.videoWidth/2) / Math.tan(view._fov.leftDegrees * Math.PI/180), 0, (this._videoEl.videoWidth/2),
0, (this._videoEl.videoHeight/2) / Math.tan(view._fov.upDegrees * Math.PI/180), (this._videoEl.videoHeight/2),
0, 0, 1],
cameraImageResolution: {
width: this._videoEl.videoWidth,
height: this._videoEl.videoHeight
},
viewMatrix: [1,0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1],
interfaceOrientation: 0,
projectionMatrix: [1,0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1]
viewMatrix: this._videoViewMatrix,
projectionMatrix: this._videoProjectionMatrix
}
var xrVideoFrame = new XRVideoFrame(buffers, pixelFormat, timestamp, camera )
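The intrinsics above follow the standard pinhole relation: a half-angle θ spanning half the image width gives fx = (width/2) / tan(θ), with the principal point assumed at the image center. As a sketch (assuming view._fov carries half-angles in degrees, which is what the code above implies):
// fx, fy from half-FOV angles; cx, cy at the image center
function intrinsicsFromFov(w, h, halfHFovDeg, halfVFovDeg) {
    var fx = (w / 2) / Math.tan(halfHFovDeg * Math.PI / 180);
    var fy = (h / 2) / Math.tan(halfVFovDeg * Math.PI / 180);
    return [fx, 0, w / 2,
            0, fy, h / 2,
            0, 0, 1];
}
// e.g. intrinsicsFromFov(1280, 720, 30, 17.5): fx = 640 / tan(30°) ≈ 1108.5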
@@ -208,7 +245,7 @@ export default class CameraReality extends Reality {
var height = this._videoEl.videoHeight;
// let's pick a size such that the video is below 256 in size in both dimensions
while (width > 512 || height > 512) {
while (width > 256 || height > 256) {
width = width / 2
height = height / 2
}
@@ -220,6 +257,8 @@ export default class CameraReality extends Reality {
this._videoFrameCanvas.height = height;
this._videoCtx = this._videoFrameCanvas.getContext('2d');
this._adjustVideoSize();
this._sendVideoFrame = true;
});
}