Make CV work on normal browsers

We send the video frames from WebRTC to the app.

The data structure needs to be fixed (it's incorrect right now), but at least we can debug CV.

Needed to restructure a lot of things, and have the sample apps work with both YUV and RGBA buffers.
This commit is contained in:
Blair MacIntyre 2018-04-04 00:37:57 -04:00
Родитель e4a04f871b
Коммит 712a6e666e
11 изменённых файлов: 395 добавлений и 150 удалений

Просмотреть файл

@ -65,9 +65,9 @@
<script>
// RAINBOW Candy of a Certain Name colors
var colors = [
{ cr: 230, cb: 100, name: "RED" },
{ cr: 71, cb: 71, name: "GREEN" },
{ cr: 152, cb: 33, name: "YELLOW" }
{ cr: 250, cg: 25, cb: 25, name: "RED" },
{ cr: 25, cg: 250, cb: 25, name: "GREEN" },
{ cr: 250, cg: 250, cb: 50, name: "YELLOW" }
]
var beginTime = ( performance || Date ).now(), prevTime = beginTime, frames = 0;
@ -102,6 +102,7 @@
this.textBox.innerText = '0.0'
this.intensity = 0;
this.cr = 0;
this.cg = 0;
this.cb = 0;
this.lightEstimate = 0;
this.el.appendChild(this.textBox)
@ -120,6 +121,7 @@
var videoFrame = XRVideoFrame.createFromMessage(ev)
self.intensity = ev.data.intensity;
self.cr = ev.data.cr;
self.cg = ev.data.cg;
self.cb = ev.data.cb;
self.handleVisionDone(videoFrame);
}
@ -183,11 +185,12 @@
handleVisionDone(videoFrame) {
var txt = "ARKit Light Estimate: " + this.lightEstimate.toFixed(2) + "<br>CV Average Intensity: " + this.intensity.toFixed(2)
+ "<br>Center R/B: " + this.cr.toFixed(2) + " / " + this.cb.toFixed(2) + "<br><center>";
+ "<br>Center RGB: " + this.cr.toFixed(2) + " / " + this.cg.toFixed(2) + " / " + this.cb.toFixed(2) + "<br><center>";
for (var i=0; i<colors.length; i++) {
var c = colors[i];
c.dist = Math.sqrt((c.cr - this.cr) * (c.cr - this.cr) + (c.cb - this.cb)*(c.cb - this.cb));
c.dist = (c.cr - this.cr) * (c.cr - this.cr) + (c.cb - this.cb)*(c.cb - this.cb);
c.dist = Math.sqrt(c.dist + (c.cg - this.cg) * (c.cg - this.cg))
txt += c.dist.toFixed(1) + " "
}
for (i=0; i<colors.length; i++) {
@ -203,14 +206,13 @@
// in the worker case, if we don't access the buffer in the worker, it won't be pulled back
if (makeTexUV && videoFrame.buffer(1).buffer) {
var buffer = videoFrame.buffer(1);
var buff = buffer.buffer;
if (this.texSize != (buff.byteLength /2 *3)) {
this.texSize = buff.byteLength /2 * 3
var pixels = buffer.buffer;
if (this.texSize != (pixels.length /2 *3)) {
this.texSize = pixels.length /2 * 3
this.texBuff = new Uint8Array( this.texSize ); // convert each pixel from 2 to 3 bytes
}
var j = 0;
var pixels = new Uint8Array(buff);
for ( var i = 0; i < this.texSize; i ++ ) {
this.texBuff[i] = pixels[j++];
i++;
@ -234,7 +236,7 @@
var w = buffer.size.width;
var h = buffer.size.height;
var pad = buffer.size.bytesPerRow - w;
var pixels = new Uint8Array(buffer.buffer);
var pixels = buffer.buffer;
var intensity = 0.0;
var p = 0;
@ -256,7 +258,7 @@
colorAtCenter(buffer) {
var w = buffer.size.width;
var h = buffer.size.height;
var pixels = new Uint8Array(buffer.buffer);
var pixels = buffer.buffer;
var cx = Math.floor(w / 2) * buffer.size.bytesPerPixel
var cy = Math.floor(h / 2)

Просмотреть файл

@ -56,13 +56,34 @@ importScripts('../../dist/webxr-worker.js')
var intensity = 0.0;
var cr = -1;
var cg = -1;
var cb = -1;
averageIntensity = function (buffer) {
averageIntensityRGBA = function (buffer) {
var w = buffer.size.width;
var h = buffer.size.height;
var pad = buffer.size.bytesPerRow - w;
var pixels = new Uint8Array(buffer.buffer);
var pad = buffer.size.bytesPerRow - w * buffer.size.bytesPerPixel;
var pixels = buffer.buffer;
intensity = 0.0;
var p = 0;
for (var r = 0; r < h; r++) {
var v = 0;
for (var i = 0; i < w; i++) {
v += (pixels[p++] + pixels[p++] + pixels[p++]) / 3
p++
}
intensity += v / w;
p += pad;
}
intensity = (intensity / h) / 255.0;
}
averageIntensityLum = function (buffer) {
var w = buffer.size.width;
var h = buffer.size.height;
var pad = buffer.size.bytesPerRow - w * buffer.size.bytesPerPixel;
var pixels = buffer.buffer;
intensity = 0.0;
var p = 0;
@ -81,36 +102,92 @@ averageIntensity = function (buffer) {
intensity = (intensity / h) / 255.0;
}
colorAtCenter = function(buffer) {
var w = buffer.size.width;
var h = buffer.size.height;
var pixels = new Uint8Array(buffer.buffer);
colorAtCenterRGB = function(buffer0) {
var w = buffer0.size.width;
var h = buffer0.size.height;
var pixels = buffer0.buffer;
var cx = Math.floor(w / 2) * buffer.size.bytesPerPixel
var cx = Math.floor(w / 2) * buffer0.size.bytesPerPixel
var cy = Math.floor(h / 2)
var p = cy * buffer.size.bytesPerRow + cx;
var p = cy * buffer0.size.bytesPerRow + cx;
cr = pixels[p++];
cg = pixels[p++];
cb = pixels[p];
}
// LUV == LuCbCr
//
// Y = 0.299R + 0.587G + 0.114B
// U'= (B-Y)*0.565
// V'= (R-Y)*0.713
// Clamp x into the inclusive range [min, max].
// NaN passes through unchanged (both comparisons are false), matching the
// original behavior.
// Declared with `var`: the original bare assignment created an implicit
// worker global, which throws a ReferenceError in strict mode.
var clamp = function (x, min, max) {
    if (x < min) {
        return min;
    }
    if (x > max) {
        return max;
    }
    return x;
}
// Sample the color at the center of a biplanar Y'CbCr frame and store the
// converted RGB result in the worker globals cr / cg / cb.
// buffer0 is read as the luma (Y) plane; buffer1 is read as a chroma plane
// with Cb then Cr at consecutive bytes (NV12-style interleaving) —
// assumes buffer1.size.bytesPerPixel reflects that layout; TODO confirm
// against the native bridge that fills these buffers.
colorAtCenterLUV = function(buffer0, buffer1) {
    // Byte offset of the center pixel in the luma plane.
    var w = buffer0.size.width;
    var h = buffer0.size.height;
    var pixels = buffer0.buffer;
    var cx = Math.floor(w / 2) * buffer0.size.bytesPerPixel
    var cy = Math.floor(h / 2)
    var p = cy * buffer0.size.bytesPerRow + cx;
    var lum = pixels[p];
    // Byte offset of the center sample in the chroma plane, which has its
    // own (typically smaller) geometry.
    w = buffer1.size.width;
    h = buffer1.size.height;
    pixels = buffer1.buffer;
    cx = Math.floor(w / 2) * buffer1.size.bytesPerPixel
    cy = Math.floor(h / 2)
    p = cy * buffer1.size.bytesPerRow + cx;
    // NOTE: cb/cr temporarily hold the *raw* chroma samples here and are
    // overwritten with the converted RGB values below — statement order
    // matters.
    cb = pixels[p++];
    cr = pixels[p];
    // luv -> rgb. see https://www.fourcc.org/fccyvrgb.php
    // Video-range conversion: Y is offset by 16 and rescaled by ~1.1643.
    var y=1.1643*(lum-16)
    var u=cb-128;
    var v=cr-128;
    cr=clamp(y+1.5958*v, 0, 255);
    cg=clamp(y-0.39173*u-0.81290*v, 0, 255);
    cb=clamp(y+2.017*u, 0, 255);
    // Alternatives:
    //
    // var y=lum
    // var u=cb-128;
    // var v=cr-128;
    // cr=y+1.402*v;
    // cg=y-0.34414*u-0.71414*v;
    // cb=y+1.772*u;
}
self.addEventListener('message', function(event){
var videoFrame = XRVideoFrame.createFromMessage(event);
try {
var videoFrame = XRVideoFrame.createFromMessage(event);
switch (videoFrame.pixelFormat) {
case XRVideoFrame.IMAGEFORMAT_YUV420P:
this.averageIntensity(videoFrame.buffer(0))
this.colorAtCenter(videoFrame.buffer(1))
// // pass the buffers back or they will be garbage collected
// var buffers = frame.buffers
// var buffs = []
// for (var i = 0; i < buffers.length; i++) {
// buffs.push(buffers[i].buffer)
// }
// postMessage ({intensity: intensity, cr: cr, cb: cb, buffers: buffs, frame: frame}, buffs);
videoFrame.postReplyMessage({intensity: intensity, cr: cr, cb: cb})
switch (videoFrame.pixelFormat) {
case XRVideoFrame.IMAGEFORMAT_YUV420P:
this.averageIntensityLum(videoFrame.buffer(0))
this.colorAtCenterLUV(videoFrame.buffer(0),videoFrame.buffer(1))
break;
case XRVideoFrame.IMAGEFORMAT_RGBA32:
this.averageIntensityRGBA(videoFrame.buffer(0))
this.colorAtCenterRGB(videoFrame.buffer(0))
break;
}
videoFrame.postReplyMessage({intensity: intensity, cr: cr, cg: cg, cb: cb})
videoFrame.release();
} catch(e) {
console.error('page error', e)
}
videoFrame.release();
});
// setInterval( function(){

Просмотреть файл

@ -38,6 +38,12 @@ export default class Reality extends EventHandlerBase {
throw new Error('Exending classes should implement _stop')
}
/*
Request another video frame be generated, typically from video-mixed Realities.
*/
_requestVideoFrame() {
    // Intentionally a no-op in the base Reality: subclasses that mix in
    // live video (e.g. CameraReality below) override this to schedule
    // capture of the next frame.
}
/*
Called by a session before it hands a new XRPresentationFrame to the app
*/
@ -84,5 +90,8 @@ export default class Reality extends EventHandlerBase {
_getLightAmbientIntensity(){
throw new Error('Exending classes should implement _getLightAmbientIntensity')
}
// attribute EventHandler onchange;
}
Reality.COMPUTER_VISION_DATA = 'cv_data'

Просмотреть файл

@ -7,6 +7,7 @@ XRPresentationFrame provides all of the values needed to render a single frame o
export default class XRPresentationFrame {
constructor(session){
this._session = session
this._timestamp = this._session.reality._getTimeStamp();
}
get session(){ return this._session }
@ -36,6 +37,9 @@ export default class XRPresentationFrame {
return this._session.reality._getLightAmbientIntensity();
}
// Timestamp captured from the reality at the moment this frame was
// constructed (see the constructor's _getTimeStamp() call).
get timestamp () {
    return this._timestamp;
}
/*
Returns an array of known XRAnchor instances. May be empty.
*/

Просмотреть файл

@ -74,9 +74,14 @@ pixelFormat should be one of XRVideoFrame.IMAGEFORMAT
// because the same size and number of buffers will be pushed/popped in the same order
var _ab = []
export default class XRVideoFrame {
constructor(buffers, pixelFormat, timestamp, camera){
this._buffers = buffers
for (var i=0; i< buffers.length; i++) {
buffers[i]._buffer = buffers[i].buffer
buffers[i].buffer = null
}
this._pixelFormat = pixelFormat
this._timestamp = timestamp
this._camera = camera
@ -91,14 +96,19 @@ export default class XRVideoFrame {
buffer(index) {
if (index >= 0 && index < this._buffers.length) {
var buff = this._buffers[index]
if (typeof buff.buffer == "string") {
var bufflen = buff.buffer.length;
buff.buffer = base64.decodeArrayBuffer(buff.buffer, _ab.length > 0 ? _ab.pop() : null);
var buffersize = buff.buffer.byteLength;
var imagesize = buff.size.height * buff.size.bytesPerRow;
}
if (!buff.buffer) {
if (typeof buff._buffer == "string") {
buff._buffer = base64.decodeArrayBuffer(buff._buffer, _ab.length > 0 ? _ab.pop() : null);
buff.buffer = new Uint8Array(buff._buffer);
} else if (buff._buffer instanceof ArrayBuffer) {
buff.buffer = new Uint8Array(buff._buffer);
} else if (buff._buffer instanceof ImageData) {
buff.buffer = ImageData.data
}
}
return buff;
}
return null
}
get pixelFormat(){ return this._pixelFormat }
@ -113,8 +123,8 @@ export default class XRVideoFrame {
// return them here when we get them back from the Worker, so they can be reused.
var buffers = this._buffers;
for (var i=0; i< buffers.length; i++) {
if (buffers[i].buffer instanceof ArrayBuffer) {
_ab.push(buffers[i].buffer)
if (buffers[i]._buffer instanceof ArrayBuffer || buffers[i]._buffer instanceof ImageData) {
_ab.push(buffers[i]._buffer)
}
}
}
@ -128,9 +138,13 @@ export default class XRVideoFrame {
var buffs = []
for (var i = 0; i < msg.buffers.length; i++) {
if (msg.buffers[i].buffer instanceof ArrayBuffer) {
buffs.push(msg.buffers[i].buffer)
msg.buffers[i].buffer = msg.buffers[i]._buffer;
if (msg.buffers[i]._buffer instanceof ArrayBuffer || msg.buffers[i]._buffer instanceof ImageData) {
buffs.push(msg.buffers[i]._buffer)
} else if (msg.buffers[i]._buffer instanceof ArrayBuffer || msg.buffers[i]._buffer instanceof ImageData) {
buffs.push(msg.buffers[i]._buffer)
}
msg.buffers[i]._buffer = null;
}
worker.postMessage(msg, buffs);
}
@ -144,12 +158,13 @@ export default class XRVideoFrame {
var buffs = []
for (var i = 0; i < msg.buffers.length; i++) {
if (msg.buffers[i].buffer instanceof ArrayBuffer) {
msg.buffers[i].buffer = null;
if (msg.buffers[i]._buffer instanceof ArrayBuffer || msg.buffers[i]._buffer instanceof ImageData) {
// any array buffers should be marked for transfer
buffs.push(msg.buffers[i].buffer)
buffs.push(msg.buffers[i]._buffer)
} else {
// if we passed in a string, and it didn't get accessed, we shouldn't pass it back out
msg.buffers.buffer[i] = null
msg.buffers[i]._buffer = null
}
}
postMessage(msg, buffs);

Просмотреть файл

@ -77,6 +77,7 @@ export default class FlatDisplay extends XRDisplay {
this._deviceWorldMatrix = new Float32Array(16)
this._deviceOrientationTracker = new DeviceOrientationTracker()
this._deviceOrientationTracker.addEventListener(DeviceOrientationTracker.ORIENTATION_UPDATE_EVENT, this._updateFromDeviceOrientationTracker.bind(this))
this._reality.addEventListener(Reality.COMPUTER_VISION_DATA, this._handleComputerVisionData.bind(this))
}
}
this.running = true
@ -214,16 +215,12 @@ export default class FlatDisplay extends XRDisplay {
}
_requestVideoFrame() {
if(this._reality._vrDisplay){ // Use ARCore
// AR Core can do this, just need to write this code I think!
} else if(this._arKitWrapper){ // Use ARKit
if(this._arKitWrapper){ // Use ARKit
// call this._arKitWrapper.requestComputerVisionData(buffers) to request a new one
this._arKitWrapper._requestComputerVisionData()
} else if (this._reality._mediaStream) {
// normal display, might have webrtc video in reality
this._reality._requestVideoFrame( (ev) => {
this._handleComputerVisionData(ev)
})
} else {
// might have webrtc video in the reality
this._reality._requestVideoFrame()
}
}

Просмотреть файл

@ -41,13 +41,13 @@ export default class MatrixMath {
}
static mat4_perspectiveFromFieldOfView(out, fov, near, far) {
const upTan = Math.tan(fov.upDegrees * MatrixMath.PI_OVER_180)
const downTan = Math.tan(fov.downDegrees * MatrixMath.PI_OVER_180)
const leftTan = Math.tan(fov.leftDegrees * MatrixMath.PI_OVER_180)
const rightTan = Math.tan(fov.rightDegrees * MatrixMath.PI_OVER_180)
var upTan = Math.tan(fov.upDegrees * MatrixMath.PI_OVER_180)
var downTan = Math.tan(fov.downDegrees * MatrixMath.PI_OVER_180)
var leftTan = Math.tan(fov.leftDegrees * MatrixMath.PI_OVER_180)
var rightTan = Math.tan(fov.rightDegrees * MatrixMath.PI_OVER_180)
const xScale = 2.0 / (leftTan + rightTan)
const yScale = 2.0 / (upTan + downTan)
var xScale = 2.0 / (leftTan + rightTan)
var yScale = 2.0 / (upTan + downTan)
out[0] = xScale
out[1] = 0.0
@ -70,23 +70,23 @@ export default class MatrixMath {
static mat4_fromRotationTranslation(out, q=[0,0,0,1], v=[0,0,0]) {
// Quaternion math
const x = q[0]
const y = q[1]
const z = q[2]
const w = q[3]
const x2 = x + x
const y2 = y + y
const z2 = z + z
var x = q[0]
var y = q[1]
var z = q[2]
var w = q[3]
var x2 = x + x
var y2 = y + y
var z2 = z + z
const xx = x * x2
const xy = x * y2
const xz = x * z2
const yy = y * y2
const yz = y * z2
const zz = z * z2
const wx = w * x2
const wy = w * y2
const wz = w * z2
var xx = x * x2
var xy = x * y2
var xz = x * z2
var yy = y * y2
var yz = y * z2
var zz = z * z2
var wx = w * x2
var wy = w * y2
var wz = w * z2
out[0] = 1 - (yy + zz)
out[1] = xy + wz
@ -109,9 +109,9 @@ export default class MatrixMath {
}
static mat4_translate(out, a, v) {
const x = v[0]
const y = v[1]
const z = v[2]
var x = v[0]
var y = v[1]
var z = v[2]
let a00
let a01
let a02
@ -143,23 +143,23 @@ export default class MatrixMath {
}
static mat4_invert(out, a) {
const a00 = a[0], a01 = a[1], a02 = a[2], a03 = a[3],
var a00 = a[0], a01 = a[1], a02 = a[2], a03 = a[3],
a10 = a[4], a11 = a[5], a12 = a[6], a13 = a[7],
a20 = a[8], a21 = a[9], a22 = a[10], a23 = a[11],
a30 = a[12], a31 = a[13], a32 = a[14], a33 = a[15]
const b00 = a00 * a11 - a01 * a10
const b01 = a00 * a12 - a02 * a10
const b02 = a00 * a13 - a03 * a10
const b03 = a01 * a12 - a02 * a11
const b04 = a01 * a13 - a03 * a11
const b05 = a02 * a13 - a03 * a12
const b06 = a20 * a31 - a21 * a30
const b07 = a20 * a32 - a22 * a30
const b08 = a20 * a33 - a23 * a30
const b09 = a21 * a32 - a22 * a31
const b10 = a21 * a33 - a23 * a31
const b11 = a22 * a33 - a23 * a32
var b00 = a00 * a11 - a01 * a10
var b01 = a00 * a12 - a02 * a10
var b02 = a00 * a13 - a03 * a10
var b03 = a01 * a12 - a02 * a11
var b04 = a01 * a13 - a03 * a11
var b05 = a02 * a13 - a03 * a12
var b06 = a20 * a31 - a21 * a30
var b07 = a20 * a32 - a22 * a30
var b08 = a20 * a33 - a23 * a30
var b09 = a21 * a32 - a22 * a31
var b10 = a21 * a33 - a23 * a31
var b11 = a22 * a33 - a23 * a32
// Calculate the determinant
let det = b00 * b11 - b01 * b10 + b02 * b09 + b03 * b08 - b04 * b07 + b05 * b06
@ -190,15 +190,15 @@ export default class MatrixMath {
}
static mat4_multiply(out, ae, be){
const a11 = ae[0], a12 = ae[4], a13 = ae[8], a14 = ae[12]
const a21 = ae[1], a22 = ae[5], a23 = ae[9], a24 = ae[13]
const a31 = ae[2], a32 = ae[6], a33 = ae[10], a34 = ae[14]
const a41 = ae[3], a42 = ae[7], a43 = ae[11], a44 = ae[15]
var a11 = ae[0], a12 = ae[4], a13 = ae[8], a14 = ae[12]
var a21 = ae[1], a22 = ae[5], a23 = ae[9], a24 = ae[13]
var a31 = ae[2], a32 = ae[6], a33 = ae[10], a34 = ae[14]
var a41 = ae[3], a42 = ae[7], a43 = ae[11], a44 = ae[15]
const b11 = be[0], b12 = be[4], b13 = be[8], b14 = be[12]
const b21 = be[1], b22 = be[5], b23 = be[9], b24 = be[13]
const b31 = be[2], b32 = be[6], b33 = be[10], b34 = be[14]
const b41 = be[3], b42 = be[7], b43 = be[11], b44 = be[15]
var b11 = be[0], b12 = be[4], b13 = be[8], b14 = be[12]
var b21 = be[1], b22 = be[5], b23 = be[9], b24 = be[13]
var b31 = be[2], b32 = be[6], b33 = be[10], b34 = be[14]
var b41 = be[3], b42 = be[7], b43 = be[11], b44 = be[15]
out[0] = a11 * b11 + a12 * b21 + a13 * b31 + a14 * b41
out[4] = a11 * b12 + a12 * b22 + a13 * b32 + a14 * b42

Просмотреть файл

@ -4,7 +4,7 @@ Quaternion wraps a vector of length 4 used as an orientation value.
Taken from https://github.com/googlevr/webvr-polyfill/blob/master/src/math-util.js which took it from Three.js
*/
export default class Quaternion{
constructor(x=0, y=0, z=0, w=1){
varructor(x=0, y=0, z=0, w=1){
this.x = x
this.y = y
this.z = z
@ -40,28 +40,28 @@ export default class Quaternion{
m21 = array16[1], m22 = array16[5], m23 = array16[9],
m31 = array16[2], m32 = array16[6], m33 = array16[10]
const trace = m11 + m22 + m33
var trace = m11 + m22 + m33
if(trace > 0){
const s = 0.5 / Math.sqrt(trace + 1.0)
var s = 0.5 / Math.sqrt(trace + 1.0)
this.w = 0.25 / s
this.x = (m32 - m23) * s
this.y = (m13 - m31) * s
this.z = (m21 - m12) * s
} else if (m11 > m22 && m11 > m33){
const s = 2.0 * Math.sqrt(1.0 + m11 - m22 - m33)
var s = 2.0 * Math.sqrt(1.0 + m11 - m22 - m33)
this.w = (m32 - m23) / s
this.x = 0.25 * s
this.y = (m12 + m21) / s
this.z = (m13 + m31) / s
} else if (m22 > m33){
const s = 2.0 * Math.sqrt(1.0 + m22 - m11 - m33)
var s = 2.0 * Math.sqrt(1.0 + m22 - m11 - m33)
this.w = (m13 - m31) / s
this.x = (m12 + m21) / s
this.y = 0.25 * s
this.z = (m23 + m32) / s
} else{
const s = 2.0 * Math.sqrt(1.0 + m33 - m11 - m22)
var s = 2.0 * Math.sqrt(1.0 + m33 - m11 - m22)
this.w = (m21 - m12) / s
this.x = (m13 + m31) / s
this.y = (m23 + m32) / s
@ -75,14 +75,14 @@ export default class Quaternion{
// 20696-function-to-convert-between-dcm-euler-angles-quaternions-and-euler-vectors/
// content/SpinCalc.m
const cos = Math.cos
const sin = Math.sin
const c1 = cos(x / 2)
const c2 = cos(y / 2)
const c3 = cos(z / 2)
const s1 = sin(x / 2)
const s2 = sin(y / 2)
const s3 = sin(z / 2)
var cos = Math.cos
var sin = Math.sin
var c1 = cos(x / 2)
var c2 = cos(y / 2)
var c3 = cos(z / 2)
var s1 = sin(x / 2)
var s2 = sin(y / 2)
var s3 = sin(z / 2)
if (order === 'XYZ'){
this.x = s1 * c2 * c3 + c1 * s2 * s3
@ -120,8 +120,8 @@ export default class Quaternion{
setFromAxisAngle(axis, angle){
// http://www.euclideanspace.com/maths/geometry/rotations/conversions/angleToQuaternion/index.htm
// assumes axis is normalized
const halfAngle = angle / 2
const s = Math.sin(halfAngle)
var halfAngle = angle / 2
var s = Math.sin(halfAngle)
this.x = axis.x * s
this.y = axis.y * s
this.z = axis.z * s
@ -135,8 +135,8 @@ export default class Quaternion{
multiplyQuaternions(a, b){
// from http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions/code/index.htm
const qax = a.x, qay = a.y, qaz = a.z, qaw = a.w
const qbx = b.x, qby = b.y, qbz = b.z, qbw = b.w
var qax = a.x, qay = a.y, qaz = a.z, qaw = a.w
var qbx = b.x, qby = b.y, qbz = b.z, qbw = b.w
this.x = qax * qbw + qaw * qbx + qay * qbz - qaz * qby
this.y = qay * qbw + qaw * qby + qaz * qbx - qax * qbz
this.z = qaz * qbw + qaw * qbz + qax * qby - qay * qbx
@ -174,7 +174,7 @@ export default class Quaternion{
if(t === 0) return this
if(t === 1) return this.copy(qb)
const x = this.x, y = this.y, z = this.z, w = this.w
var x = this.x, y = this.y, z = this.z, w = this.w
let cosHalfTheta = w * qb.w + x * qb.x + y * qb.y + z * qb.z
if (cosHalfTheta < 0){
this.w = - qb.w
@ -193,8 +193,8 @@ export default class Quaternion{
return this
}
const halfTheta = Math.acos(cosHalfTheta)
const sinHalfTheta = Math.sqrt(1.0 - cosHalfTheta * cosHalfTheta)
var halfTheta = Math.acos(cosHalfTheta)
var sinHalfTheta = Math.sqrt(1.0 - cosHalfTheta * cosHalfTheta)
if (Math.abs(sinHalfTheta) < 0.001){
this.w = 0.5 * (w + this.w)
this.x = 0.5 * (x + this.x)
@ -204,8 +204,8 @@ export default class Quaternion{
return this
}
const ratioA = Math.sin((1 - t) * halfTheta) / sinHalfTheta
const ratioB = Math.sin(t * halfTheta) / sinHalfTheta
var ratioA = Math.sin((1 - t) * halfTheta) / sinHalfTheta
var ratioB = Math.sin(t * halfTheta) / sinHalfTheta
this.w = (w * ratioA + this.w * ratioB)
this.x = (x * ratioA + this.x * ratioB)
this.y = (y * ratioA + this.y * ratioB)

Просмотреть файл

@ -4,7 +4,7 @@ Vector3 wraps a vector of length 3, often used as a position in 3D space.
Taken from https://github.com/googlevr/webvr-polyfill/blob/master/src/math-util.js which took it from Three.js
*/
export default class Vector3 {
constructor(x=0, y=0, z=0){
varructor(x=0, y=0, z=0){
this.x = x
this.y = y
this.z = z
@ -57,20 +57,20 @@ export default class Vector3 {
}
applyQuaternion(q){
const x = this.x
const y = this.y
const z = this.z
var x = this.x
var y = this.y
var z = this.z
const qx = q.x
const qy = q.y
const qz = q.z
const qw = q.w
var qx = q.x
var qy = q.y
var qz = q.z
var qw = q.w
// calculate quat * vector
const ix = qw * x + qy * z - qz * y
const iy = qw * y + qz * x - qx * z
const iz = qw * z + qx * y - qy * x
const iw = - qx * x - qy * y - qz * z
var ix = qw * x + qy * z - qz * y
var iy = qw * y + qz * x - qx * z
var iz = qw * z + qx * y - qy * x
var iw = - qx * x - qy * y - qz * z
// calculate result * inverse quat
this.x = ix * qw + iw * - qx + iy * - qz - iz * - qy
@ -81,10 +81,10 @@ export default class Vector3 {
}
applyMatrix4(matrix){
const x = this.x
const y = this.y
const z = this.z
const w = 1 / (matrix[3] * x + matrix[7] * y + matrix[11] * z + matrix[15])
var x = this.x
var y = this.y
var z = this.z
var w = 1 / (matrix[3] * x + matrix[7] * y + matrix[11] * z + matrix[15])
this.x = (matrix[0] * x + matrix[4] * y + matrix[8] * z + matrix[12]) * w
this.y = (matrix[1] * x + matrix[5] * y + matrix[9] * z + matrix[13]) * w
this.z = (matrix[2] * x + matrix[6] * y + matrix[10] * z + matrix[14]) * w
@ -96,8 +96,8 @@ export default class Vector3 {
}
crossVectors(a, b){
const ax = a.x, ay = a.y, az = a.z
const bx = b.x, by = b.y, bz = b.z
var ax = a.x, ay = a.y, az = a.z
var bx = b.x, by = b.y, bz = b.z
this.x = ay * bz - az * by
this.y = az * bx - ax * bz
this.z = ax * by - ay * bx

Просмотреть файл

@ -4,6 +4,8 @@ import * as mat4 from "../fill/gl-matrix/mat4.js";
import * as quat from "../fill/gl-matrix/quat.js";
import * as vec3 from "../fill/gl-matrix/vec3.js";
import base64 from "../fill/base64-binary.js";
import Quaternion from '../fill/Quaternion.js';
import MatrixMath from '../fill/MatrixMath.js';
/*
ARKitWrapper talks to Apple ARKit, as exposed by Mozilla's test ARDemo app.
@ -73,15 +75,18 @@ export default class ARKitWrapper extends EventHandlerBase {
* @private
*/
this.viewMatrix_ = new Float32Array(16);
/**
/**
* The list of planes coming from ARKit.
* @type {Map<number, ARPlane}
* @private
*/
this.planes_ = new Map();
this.anchors_ = new Map();
this._timeOffset = 0;
this.timestamp = 0;
this._globalCallbacksMap = {} // Used to map a window.arkitCallback method name to an ARKitWrapper.on* method name
// Set up the window.arkitCallback methods that the ARKit bridge depends on
let callbackNames = ['onInit', 'onWatch']
@ -97,7 +102,11 @@ export default class ARKitWrapper extends EventHandlerBase {
light_intensity: true,
computer_vision_data: false
}
this._m90 = mat4.fromZRotation(mat4.create(), 90*MatrixMath.PI_OVER_180);
this._m90neg = mat4.fromZRotation(mat4.create(), -90*MatrixMath.PI_OVER_180);
this._m180 = mat4.fromZRotation(mat4.create(), 180*MatrixMath.PI_OVER_180);
this._mTemp = mat4.create();
// temp storage for CV arraybuffers
//this._ab = []
@ -718,6 +727,7 @@ export default class ARKitWrapper extends EventHandlerBase {
_onWatch is called from native ARKit on each frame:
data:
{
"timestamp": time value
"light_intensity": value
"camera_view":[4x4 column major affine transform matrix],
"projection_camera":[4x4 projection matrix],
@ -743,13 +753,20 @@ export default class ARKitWrapper extends EventHandlerBase {
}
*/
_adjustARKitTime(time) {
if (this._timeOffset < 0) {
this._timeOffset = ( performance || Date ).now() - time;
}
return time + this._timeOffset;
}
_onWatch(data){
this._rawARData = data
this.dispatchEvent(new CustomEvent(ARKitWrapper.WATCH_EVENT, {
source: this,
detail: this._rawARData
}))
this.timestamp = this._adjustARKitTime(data.timestamp)
this.lightIntensity = data.light_intensity;
this.viewMatrix_ = data.camera_view;
this.projectionMatrix_ = data.projection_camera;
@ -922,6 +939,30 @@ export default class ARKitWrapper extends EventHandlerBase {
return;
}
// the orientation matrix we get is relative to the current view orientation.
// We need to add an orientation around z, so that we have the orientation that goes from
// camera frame to the current view orientation, since the camera is fixed and the view
// changes as we rotate the device.
var orientation = detail.camera.interfaceOrientation;
mat4.copy(this._mTemp, detail.camera.viewMatrix)
switch (orientation) {
case 1:
// rotate by -90;
mat4.multiply(detail.camera.viewMatrix, this._mTemp, this._m90neg)
break;
case 2:
// rotate by 90;
mat4.multiply(detail.camera.viewMatrix, this._mTemp, this._m90)
break;
case 3:
// rotate by nothing
break;
case 4:
// rotate by 180;
mat4.multiply(detail.camera.viewMatrix, this._mTemp, this._m180)
break;
}
// convert buffers in place
//var buffers = detail.frame.buffers;
@ -958,7 +999,7 @@ export default class ARKitWrapper extends EventHandlerBase {
break;
}
var xrVideoFrame = new XRVideoFrame(detail.frame.buffers, detail.frame.pixelFormat, detail.frame.timestamp, detail.camera )
var xrVideoFrame = new XRVideoFrame(detail.frame.buffers, detail.frame.pixelFormat, this._adjustARKitTime(detail.frame.timestamp), detail.camera )
this.dispatchEvent(
new CustomEvent(
ARKitWrapper.COMPUTER_VISION_DATA,

Просмотреть файл

@ -40,6 +40,10 @@ export default class CameraReality extends Reality {
this._vrDisplay = null
this._vrFrameData = null
// dealing with video frames from webrtc
this._sendingVideo = false;
this._sendVideoFrame = false;
this._lightEstimate = new XRLightEstimate();
// Try to find a WebVR 1.1 display that supports Google's ARCore extensions
@ -85,6 +89,60 @@ export default class CameraReality extends Reality {
this._vrDisplay.getFrameData(this._vrFrameData)
}
// WebRTC video
if (this._videoEl && this._sendVideoFrame) {
this._sendVideoFrame = false;
var canvasWidth = this._videoRenderWidth;
var canvasHeight = this._videoRenderHeight;
this._videoCtx.drawImage(this._videoEl, 0, 0, canvasWidth, canvasHeight);
var imageData = this._videoCtx.getImageData(0, 0, canvasWidth, canvasHeight);
var data = imageData.data
var len = imageData.data.length
var buff = new ArrayBuffer(len)
var buffData = new Uint8Array(buff);
for (var i = 0; i < len; i++) buffData[i] = data[i]
var buffers = [
{
size: {
width: canvasWidth,
height: canvasHeight,
bytesPerRow: canvasWidth * 4,
bytesPerPixel: 4
},
buffer: buff
}];
var pixelFormat = XRVideoFrame.IMAGEFORMAT_RGBA32;
var timestamp = frame.timestamp;
// FIX.
var camera = {
cameraIntrinsics: [0, 0, 0, 0, 0, 0, 0, 0, 0],
cameraImageResolution: {
width: this._videoEl.videoWidth,
height: this._videoEl.videoHeight
},
viewMatrix: [1,0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1],
interfaceOrientation: 0,
projectionMatrix: [1,0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1]
}
var xrVideoFrame = new XRVideoFrame(buffers, pixelFormat, timestamp, camera )
this.dispatchEvent(
new CustomEvent(
Reality.COMPUTER_VISION_DATA,
{
source: this,
detail: xrVideoFrame
}
)
)
}
// TODO update the anchor positions using ARCore or ARKit
}
@ -125,18 +183,52 @@ export default class CameraReality extends Reality {
this._videoEl.style.height = '100%'
this._videoEl.srcObject = stream
this._videoEl.play()
this._setupWebRTC(parameters)
}).catch(err => {
console.error('Could not set up video stream', err)
this._initialized = false
this._running = false
})
} else {
this._xr._realityEls.appendChild(this._videoEl)
this._videoEl.play()
if (this._videoEl) {
this._xr._realityEls.appendChild(this._videoEl)
this._videoEl.play()
this._setupWebRTC(parameters)
}
}
}
}
_setupWebRTC(parameters) {
if (parameters.videoFrames) {
this._sendingVideo = true;
this._videoEl.addEventListener('loadedmetadata', () => {
var width = this._videoEl.videoWidth;
var height = this._videoEl.videoHeight;
// let's pick a size such that the video is below 512 in size in both dimensions
while (width > 512 || height > 512) {
width = width / 2
height = height / 2
}
this._videoRenderWidth = width;
this._videoRenderHeight = height;
this._videoFrameCanvas = document.createElement('canvas');
this._videoFrameCanvas.width = width;
this._videoFrameCanvas.height = height;
this._videoCtx = this._videoFrameCanvas.getContext('2d');
this._sendVideoFrame = true;
});
}
}
_requestVideoFrame() {
    // Arm the one-shot flag; the frame-update path consumes it (and
    // clears it) when it captures and dispatches the next video frame.
    this._sendVideoFrame = true;
}
_stop(){
if(this._running === false) return
this._running = false
@ -349,6 +441,14 @@ export default class CameraReality extends Reality {
}
}
_getTimeStamp() {
if(this._arKitWrapper !== null){
return this._arKitWrapper.timestamp;
}else{
// use performance.now()
return ( performance || Date ).now();
}
}
/*
No floor in AR
*/