fixes for computer vision callbacks

- needed to pass session parameters all the way down (from the Session through the Display to the Reality)
- removed extra calls to "watch" from FlatDisplay (they were probably there because of a bug in event dispatch that caused some events to be missed)
- created vision events up through Display and Session. "computer_vision_data" can now be passed as a session parameter, which causes the iOS app to generate vision frames.
- session.requestVideoFrames() can now be called to start sending frames to the supplied callback (see the sketch below)
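A minimal usage sketch of the new path. Hedged: the exact requestSession() parameter shape depends on the polyfill version; only "computer_vision_data" and requestVideoFrames() come from this commit, the rest is illustrative.

    display.requestSession({
        type: XRSession.AUGMENTATION,
        exclusive: false,
        computer_vision_data: true       // asks the iOS app to start generating vision frames
    }).then(session => {
        // the callback is registered as a "videoFrame" listener on the display
        session.requestVideoFrames(ev => {
            console.log(ev.detail.frame.pixelFormat, ev.detail.frame.buffers.length)
        })
    })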
This commit is contained in:
Blair MacIntyre 2018-03-18 14:57:53 -04:00
Parent 19b37f9f97
Commit 958cf07b06
7 changed files with 258 additions and 92 deletions


@@ -27,7 +27,7 @@ export default class Reality extends EventHandlerBase {
/*
Called when at least one active XRSession is using this Reality
*/
_start(){
_start(parameters){
throw new Error('Extending classes should implement _start')
}


@@ -70,6 +70,10 @@ export default class XRSession extends EventHandlerBase {
})
}
requestVideoFrames(callback) {
this._display.addEventListener("videoFrame", callback)
}
_createPresentationFrame(){
return new XRPresentationFrame(this)
}
@@ -91,7 +95,7 @@ export default class XRSession extends EventHandlerBase {
}
return null
}
/*
attribute EventHandler onblur;
attribute EventHandler onfocus;


@@ -41,7 +41,7 @@ export default class FlatDisplay extends XRDisplay {
this._views.push(new XRView(this._fov, this._depthNear, this._depthFar))
}
_start(){
_start(parameters=null){
if(this._reality._vrDisplay){ // Use ARCore
if(this._vrFrameData === null){
this._vrFrameData = new VRFrameData()
@@ -62,10 +62,12 @@ export default class FlatDisplay extends XRDisplay {
this._arKitWrapper.addEventListener(ARKitWrapper.AR_TRACKING_CHANGED, this._handleArTrackingChanged.bind(this))
this._arKitWrapper.addEventListener(ARKitWrapper.COMPUTER_VISION_DATA, this._handleComputerVisionData.bind(this))
this._arKitWrapper.waitForInit().then(() => {
this._arKitWrapper.watch()
// doing this in the reality
// this._arKitWrapper.watch()
})
} else {
this._arKitWrapper.watch()
// doing this in the reality
// this._arKitWrapper.watch()
}
} else { // Use device orientation
if(this._initialized === false){
@@ -78,7 +80,7 @@ export default class FlatDisplay extends XRDisplay {
}
}
this.running = true
this._reality._start()
this._reality._start(parameters)
}
_stop(){
@@ -159,15 +161,16 @@ export default class FlatDisplay extends XRDisplay {
}
_handleARKitInit(ev){
setTimeout(() => {
this._arKitWrapper.watch({
location: true,
camera: true,
objects: true,
light_intensity: true,
computer_vision_data: true
})
}, 1000)
// doing this in the reality
// setTimeout(() => {
// this._arKitWrapper.watch({
// location: true,
// camera: true,
// objects: true,
// light_intensity: true,
// computer_vision_data: true
// })
// }, 1000)
}
_handleARKitWindowResize(ev){
@@ -192,66 +195,30 @@ export default class FlatDisplay extends XRDisplay {
// #define WEB_AR_TRACKING_STATE_NOT_AVAILABLE @"ar_tracking_not_available"
}
/*
ev.detail contains:
{
"frame": {
"buffers": [ // Array of base64 encoded string buffers
{
"size": {
"width": 320,
"height": 180
},
"buffer": "e3x...d7d"
},
{
"size": {
"width": 160,
"height": 90
},
"buffer": "ZZF.../fIJ7"
}
],
"pixelFormatType": "kCVPixelFormatType_420YpCbCr8BiPlanarFullRange",
"timestamp": 337791
},
"camera": {
"cameraIntrinsics": [3x3 matrix],
fx 0 px
0 fy py
0 0 1
fx and fy are the focal length in pixels.
px and py are the coordinates of the principal point in pixels.
The origin is at the center of the upper-left pixel.
"cameraImageResolution": {
"width": 1280,
"height": 720
},
"viewMatrix": [4x4 camera view matrix],
"interfaceOrientation": 3,
// 0 UIDeviceOrientationUnknown
// 1 UIDeviceOrientationPortrait
// 2 UIDeviceOrientationPortraitUpsideDown
// 3 UIDeviceOrientationLandscapeRight
// 4 UIDeviceOrientationLandscapeLeft
"projectionMatrix": [4x4 camera projection matrix]
}
}
*/
_handleComputerVisionData(ev) {
this.dispatchEvent(
new CustomEvent(
"videoFrame",
{
source: this,
detail: ev.detail
}
)
)
// Do whatever is needed with the image buffers here, and then call
// this._arKitWrapper.requestComputerVisionData() to request a new one
this._arKitWrapper.requestComputerVisionData()
}
_createSession(parameters){
this._start()
_createSession(parameters=null){
this._start(parameters)
return super._createSession(parameters)
}
_supportedCreationParameters(parameters){
return parameters.type === XRSession.AUGMENTATION && parameters.exclusive === false
}
//attribute EventHandler ondeactivate; // FlatDisplay never deactivates
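For reference, a rough sketch of a consumer of these "videoFrame" events; the onVideoFrame and processPlane names are illustrative, not part of the commit. By the time the event reaches the callback the buffers have already been base64 decoded by ARKitWrapper (see _onComputerVisionData below):

    function onVideoFrame(ev) {
        const frame = ev.detail.frame
        const camera = ev.detail.camera
        // frame.pixelFormat is "YUV420P" for the biplanar format; the first buffer is
        // presumably the full-resolution luma plane, the second the half-size chroma plane
        for (const b of frame.buffers) {
            processPlane(b.buffer, b.size.width, b.size.height)   // b.buffer is a Uint8Array
        }
        // camera.cameraIntrinsics, camera.viewMatrix and camera.projectionMatrix are also available
    }
    session.requestVideoFrames(onVideoFrame)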


@@ -0,0 +1,94 @@
/*
Copyright (c) 2011, Daniel Guerrero
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL DANIEL GUERRERO BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* Uses the new typed arrays in JavaScript to base64 encode/decode binary data.
* At the moment it just decodes a base64 encoded string
* into either an ArrayBuffer (decodeArrayBuffer)
* or a Uint8Array (decode)
*
* References:
* https://developer.mozilla.org/en/JavaScript_typed_arrays/ArrayBuffer
* https://developer.mozilla.org/en/JavaScript_typed_arrays/Uint8Array
*/
export default class base64 {
/* will return an ArrayBuffer */
static decodeArrayBuffer(input) {
var bytes = (input.length/4) * 3;
var ab = new ArrayBuffer(bytes);
this.decode(input, ab);
return ab;
}
static removePaddingChars(input){
var lkey = this._keyStr.indexOf(input.charAt(input.length - 1));
if(lkey == 64){
return input.substring(0,input.length - 1);
}
return input;
}
static decode(input, arrayBuffer) {
//strip up to two padding chars from the end, if present
input = this.removePaddingChars(input);
input = this.removePaddingChars(input);
var bytes = parseInt((input.length / 4) * 3, 10);
var uarray;
var chr1, chr2, chr3;
var enc1, enc2, enc3, enc4;
var i = 0;
var j = 0;
if (arrayBuffer)
uarray = new Uint8Array(arrayBuffer);
else
uarray = new Uint8Array(bytes);
input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "");
for (i=0; i<bytes; i+=3) {
//get the 3 octets from the next 4 ascii chars
enc1 = this._keyStr.indexOf(input.charAt(j++));
enc2 = this._keyStr.indexOf(input.charAt(j++));
enc3 = this._keyStr.indexOf(input.charAt(j++));
enc4 = this._keyStr.indexOf(input.charAt(j++));
chr1 = (enc1 << 2) | (enc2 >> 4);
chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
chr3 = ((enc3 & 3) << 6) | enc4;
uarray[i] = chr1;
if (enc3 != 64) uarray[i+1] = chr2;
if (enc4 != 64) uarray[i+2] = chr3;
}
return uarray;
}
}
base64._keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/="
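A quick sketch of how this helper behaves (the import path is illustrative; ARKitWrapper below imports it from "../fill/base64-binary.js"):

    import base64 from "./base64-binary.js"

    // "aGVsbG8=" is base64 for "hello"
    const bytes = base64.decode("aGVsbG8=")          // Uint8Array [104, 101, 108, 108, 111]

    // decodeArrayBuffer sizes its buffer as input.length / 4 * 3, so padded input
    // yields an ArrayBuffer slightly longer than the decoded data (6 bytes here)
    const ab = base64.decodeArrayBuffer("aGVsbG8=")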


@@ -3,6 +3,7 @@ import * as glMatrix from "../fill/gl-matrix/common.js";
import * as mat4 from "../fill/gl-matrix/mat4.js";
import * as quat from "../fill/gl-matrix/quat.js";
import * as vec3 from "../fill/gl-matrix/vec3.js";
import base64 from "../fill/base64-binary.js";
/*
ARKitWrapper talks to Apple ARKit, as exposed by Mozilla's test ARDemo app.
@@ -41,17 +42,17 @@ export default class ARKitWrapper extends EventHandlerBase {
this.lightIntensity = 1000;
/**
* The current projection matrix of the device.
* @type {Float32Array}
* @private
*/
this.projectionMatrix_ = new Float32Array(16);
/**
* The current view matrix of the device.
* @type {Float32Array}
* @private
*/
this.viewMatrix_ = new Float32Array(16);
/**
* The list of planes coming from ARKit.
* @type {Map<number, ARPlane}
@@ -67,7 +68,16 @@ export default class ARKitWrapper extends EventHandlerBase {
for(let i=0; i < callbackNames.length; i++){
this._generateGlobalCallback(callbackNames[i], i)
}
// default options for initializing ARKit
this._defaultOptions = {
location: true,
camera: true,
objects: true,
light_intensity: true,
computer_vision_data: true
}
// Set up some named global methods that the ARKit to JS bridge uses and send out custom events when they are called
let eventCallbacks = [
['arkitStartRecording', ARKitWrapper.RECORD_START_EVENT],
@@ -79,8 +89,8 @@ export default class ARKitWrapper extends EventHandlerBase {
['arkitShowDebug', ARKitWrapper.SHOW_DEBUG_EVENT],
['arkitWindowResize', ARKitWrapper.WINDOW_RESIZE_EVENT],
['onError', ARKitWrapper.ON_ERROR],
['arTrackingChanged', ARKitWrapper.AR_TRACKING_CHANGED],
['onComputerVisionData', ARKitWrapper.COMPUTER_VISION_DATA]
['arTrackingChanged', ARKitWrapper.AR_TRACKING_CHANGED]
//,['onComputerVisionData', ARKitWrapper.COMPUTER_VISION_DATA]
]
for(let i=0; i < eventCallbacks.length; i++){
window[eventCallbacks[i][0]] = (detail) => {
@@ -96,6 +106,13 @@ export default class ARKitWrapper extends EventHandlerBase {
)
}
}
/*
* Computer vision data needs extra massaging (base64 decoding the buffers and normalizing the pixel format name), so it is handled separately from the generic event callbacks above
*/
window['onComputerVisionData'] = (detail) => {
this._onComputerVisionData(detail);
}
/**
* The result of a raycast into the AR world encoded as a transform matrix.
* This structure has a single property - modelMatrix - which encodes the
@@ -545,6 +562,11 @@ export default class ARKitWrapper extends EventHandlerBase {
})
}
/*
RACE CONDITION: if stop() is called and then watch(), stop() does not set _isWatching to false until it gets a message back from the app,
so watch() will return early and not issue a watch command. May want to set _isWatching to false immediately?
*/
/*
If this instance is currently watching, send the stopAR message to ARKit to request that it stop sending data on onWatch
*/
@@ -572,6 +594,7 @@ export default class ARKitWrapper extends EventHandlerBase {
computer_vision_data: boolean
}
*/
watch(options=null){
if (!this._isInitialized){
return false
@@ -581,18 +604,14 @@ export default class ARKitWrapper extends EventHandlerBase {
}
this._isWatching = true
if(options === null){
options = {
location: true,
camera: true,
objects: true,
light_intensity: true,
computer_vision_data: true
}
var newO = Object.assign({}, this._defaultOptions);
if(options != null) {
newO = Object.assign(newO, options)
}
const data = {
options: options,
options: newO,
callback: this._globalCallbacksMap.onWatch
}
console.log('----WATCH');
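With this change a caller can pass a partial options object and the remaining flags fall back to _defaultOptions; a hedged sketch (the flag value is illustrative):

    const arKitWrapper = ARKitWrapper.GetOrCreate()
    arKitWrapper.waitForInit().then(() => {
        // only computer_vision_data is overridden; location, camera, objects and
        // light_intensity keep their defaults (true) via the Object.assign merge
        arKitWrapper.watch({ computer_vision_data: false })
    })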
@@ -804,6 +823,88 @@ export default class ARKitWrapper extends EventHandlerBase {
}
}
/*
ev.detail contains:
{
"frame": {
"buffers": [ // Array of base64 encoded string buffers
{
"size": {
"width": 320,
"height": 180
},
"buffer": "e3x...d7d"
},
{
"size": {
"width": 160,
"height": 90
},
"buffer": "ZZF.../fIJ7"
}
],
"pixelFormatType": "kCVPixelFormatType_420YpCbCr8BiPlanarFullRange",
"timestamp": 337791
},
"camera": {
"cameraIntrinsics": [3x3 matrix],
fx 0 px
0 fy py
0 0 1
fx and fy are the focal length in pixels.
px and py are the coordinates of the principal point in pixels.
The origin is at the center of the upper-left pixel.
"cameraImageResolution": {
"width": 1280,
"height": 720
},
"viewMatrix": [4x4 camera view matrix],
"interfaceOrientation": 3,
// 0 UIDeviceOrientationUnknown
// 1 UIDeviceOrientationPortrait
// 2 UIDeviceOrientationPortraitUpsideDown
// 3 UIDeviceOrientationLandscapeRight
// 4 UIDeviceOrientationLandscapeLeft
"projectionMatrix": [4x4 camera projection matrix]
}
}
*/
_onComputerVisionData(detail) {
// decode the base64 encoded buffers into Uint8Arrays
if (!detail.frame || !detail.frame.buffers || detail.frame.buffers.length <= 0) {
console.error("detail passed to _onComputerVisionData is bad, no buffers")
return;
}
// convert buffers in place
var buffers = detail.frame.buffers;
for (var i = 0; i < buffers.length; i++) {
var bufflen = buffers[i].buffer.length;
buffers[i].buffer = base64.decode(buffers[i].buffer);
var buffersize = buffers[i].buffer.length;
var imagesize = buffers[i].size.height * buffers[i].size.bytesPerRow;
}
switch(detail.frame.pixelFormatType) {
case "kCVPixelFormatType_420YpCbCr8BiPlanarFullRange":
detail.frame.pixelFormat = "YUV420P";
break;
default:
detail.frame.pixelFormat = detail.frame.pixelFormatType;
break;
}
this.dispatchEvent(
new CustomEvent(
ARKitWrapper.COMPUTER_VISION_DATA,
{
source: this,
detail: detail
}
)
)
}
/*
Requests a new set of buffers for computer vision processing from ARKit
*/
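As a side note on the cameraIntrinsics documented above, this is the standard pinhole model; a hedged sketch of projecting a camera-space point onto pixel coordinates (the helper is illustrative, and pulling fx/fy/px/py out of the flat 3x3 array depends on its row/column ordering, which the comment does not specify):

    function projectToPixels(x, y, z, fx, fy, px, py) {
        // scale by the focal length in pixels, then offset by the principal point;
        // assumes a point in front of the camera with positive depth z
        const u = fx * (x / z) + px
        const v = fy * (y / z) + py
        return [u, v]
    }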


@@ -88,7 +88,7 @@ export default class CameraReality extends Reality {
// TODO update the anchor positions using ARCore or ARKit
}
_start(){
_start(parameters=null){
if(this._running) return
this._running = true
@@ -105,10 +105,10 @@ export default class CameraReality extends Reality {
this._arKitWrapper = ARKitWrapper.GetOrCreate()
this._arKitWrapper.addEventListener(ARKitWrapper.WATCH_EVENT, this._handleARKitWatch.bind(this))
this._arKitWrapper.waitForInit().then(() => {
this._arKitWrapper.watch()
this._arKitWrapper.watch(parameters)
})
} else {
this._arKitWrapper.watch()
this._arKitWrapper.watch(parameters)
}
} else { // Using WebRTC
if(this._initialized === false){
@@ -182,7 +182,7 @@ export default class CameraReality extends Reality {
return
}
// This assumes that the anchor's coordinates are in the tracker coordinate system
anchor.coordinateSystem._relativeMatrix = anchorInfo.modelMatrix
anchor.coordinateSystem._relativeMatrix = anchorInfo.transform
}
_addAnchor(anchor, display){


@@ -11,7 +11,7 @@ export default class VirtualReality extends Reality {
/*
Called when at least one active XRSession is using this Reality
*/
_start(){
_start(parameters){
}
/*