Work in progress on a sample using ARKit 1.5 image detection.
This commit is contained in:
Родитель
6700182c5a
Коммит
02239f2087
Двоичный файл не отображается.
После Ширина: | Высота: | Размер: 3.3 KiB |
|
@ -0,0 +1,157 @@
|
|||
|
||||
<html>
|
||||
<head>
|
||||
<title>ARKit 1.5 image detection example</title>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
|
||||
<style>
|
||||
body, html {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
overflow: hidden;
|
||||
position: fixed;
|
||||
width: 100%;
|
||||
height: 100vh;
|
||||
-webkit-user-select: none;
|
||||
user-select: none;
|
||||
}
|
||||
#target {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
position: absolute;
|
||||
}
|
||||
.common-message {
|
||||
position: absolute;
|
||||
top: 50%;
|
||||
left: 50%;
|
||||
transform: translate(-50%, -50%);
|
||||
font-size: 20px;
|
||||
}
|
||||
</style>
|
||||
<link rel="stylesheet" href="../common.css"/>
|
||||
<script src="../libs/three.min.js"></script>
|
||||
<script type="module" src="../../polyfill/XRPolyfill.js"></script>
|
||||
<script type="module" src="../../polyfill/platform/ARKitWrapper.js"></script>
|
||||
<script nomodule src="../../dist/webxr-polyfill.js"></script>
|
||||
<script src="../common.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<div id="target" />
|
||||
<img id="aruco-marker" src="Aruco-marker.jpg">
|
||||
<div onclick="hideMe(this)" id="description">
|
||||
<h2>Image Detection</h2>
|
||||
<h5>(click to dismiss)</h5>
|
||||
<p>Registers an ARKit 1.5 detection image and anchors content to it.</p>
|
||||
</div>
|
||||
<script>
|
||||
/*
ARKitImageDetectionExample demonstrates ARKit 1.5 image detection:
on the first screen tap it registers the page's Aruco marker image as an
ARKit detection image. The anchor-based hit test is still commented out
while this sample is a work in progress.
*/
class ARKitImageDetectionExample extends XRExampleBase {
	constructor(domElement){
		super(domElement, false)
		this._tapEventData = null // Filled in on touch start and consumed in updateScene
		this._imageAnchorCreated = false // Guards against registering the detection image more than once

		// A message at the bottom of the screen that shows whether a surface has been found
		this._messageEl = document.createElement('div')
		this.el.appendChild(this._messageEl)
		this._messageEl.style.position = 'absolute'
		this._messageEl.style.bottom = '10px'
		this._messageEl.style.left = '10px'
		this._messageEl.style.color = 'white'
		this._messageEl.style['font-size'] = '16px'

		this.el.addEventListener('touchstart', this._onTouchStart.bind(this), false)
	}

	// Called during construction to allow the app to populate this.scene
	initializeScene(){
		// Add a box at the scene origin
		let box = new THREE.Mesh(
			new THREE.BoxBufferGeometry(0.1, 0.1, 0.1),
			new THREE.MeshPhongMaterial({ color: '#DDFFDD' })
		)
		box.position.set(0, 0.05, 0)
		this.floorGroup.add(box)

		// Add a few lights
		this.scene.add(new THREE.AmbientLight('#FFF', 0.2))
		let directionalLight = new THREE.DirectionalLight('#FFF', 0.6)
		directionalLight.position.set(0, 10, 0)
		this.scene.add(directionalLight)
	}

	// Called once per frame, before render, to give the app a chance to update this.scene
	updateScene(frame){
		// If we have tap data, register the detection image with ARKit
		if(this._tapEventData !== null){
			const x = this._tapEventData[0]
			const y = this._tapEventData[1]
			this._tapEventData = null

			// Only register the detection image once: anchors are keyed by uid,
			// so creating a second anchor with the same uid on every tap would
			// pile up duplicate registrations.
			if(!this._imageAnchorCreated){
				this._imageAnchorCreated = true
				const imageData = this.getImageData()
				// 0.1 is the physical width of the printed marker, in meters
				frame.createImageAnchor('aruco-marker', imageData.data, imageData.width, imageData.height, 0.1)
			}

			// Attempt a hit test using the normalized screen coordinates
			// (disabled while image-anchor creation is being developed)
			/*
			frame.findAnchor(x, y).then(anchorOffset => {
				if(anchorOffset === null){
					this._messageEl.innerHTML = 'miss'
				} else {
					this._messageEl.innerHTML = 'hit'
					this.addAnchoredNode(anchorOffset, this._createSceneGraphNode())
				}
			}).catch(err => {
				console.error('Error in hit test', err)
			})
			*/
		}
	}

	// Draw the in-page marker <img> onto a scratch canvas and return its
	// pixel data for use as an ARKit detection image.
	// NOTE(review): uses img.width/img.height (layout size); presumably the
	// image is displayed at natural size — confirm, or switch to naturalWidth.
	getImageData() {
		const canvas = document.createElement('canvas');
		const context = canvas.getContext('2d');
		const img = document.getElementById('aruco-marker');
		canvas.width = img.width;
		canvas.height = img.height;
		context.drawImage(img, 0, 0 );
		return context.getImageData(0, 0, img.width, img.height);
	}

	// Save screen taps as normalized coordinates for use in this.updateScene
	_onTouchStart(ev){
		if (!ev.touches || ev.touches.length === 0) {
			console.error('No touches on touch event', ev)
			return
		}
		// Save screen coordinates normalized to 0..1, with (0,0) at the top
		// left and (1,1) at the bottom right of the viewport
		this._tapEventData = [
			ev.touches[0].clientX / window.innerWidth,
			ev.touches[0].clientY / window.innerHeight
		]
	}

	// Creates a box used to indicate the location of an anchor offset
	_createSceneGraphNode(){
		const group = new THREE.Group()
		const geometry = new THREE.BoxBufferGeometry(0.1, 0.1, 0.1)
		const material = new THREE.MeshPhongMaterial({ color: '#99FF99' })
		const mesh = new THREE.Mesh(geometry, material)
		mesh.position.set(0, 0.05, 0)
		group.add(mesh)
		return group
	}
}
|
||||
|
||||
|
||||
window.addEventListener('DOMContentLoaded', () => {
|
||||
setTimeout(() => {
|
||||
try {
|
||||
window.pageApp = new ARKitImageDetectionExample(document.getElementById('target'))
|
||||
} catch(e) {
|
||||
console.error('page error', e)
|
||||
}
|
||||
}, 1000)
|
||||
})
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
Двоичный файл не отображается.
После Ширина: | Высота: | Размер: 11 KiB |
|
@ -263,6 +263,14 @@
|
|||
<img src="examples/opencv-aruco/screenshot.png" width="300" height="200"/>
|
||||
</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="examples/image_detection/">Image Detection</a>
|
||||
<a class="source" href="https://github.com/mozilla/webxr-polyfill/blob/master/examples/image_detection/index.html">source</a>
|
||||
<p>ARKit 1.5 image detection example.</p>
|
||||
<a class="img" href="examples/image_detection/">
|
||||
<img src="examples/image_detection/screenshot.png" width="300" height="200"/>
|
||||
</a>
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
</body>
|
||||
|
|
|
@ -75,6 +75,10 @@ export default class Reality extends EventHandlerBase {
|
|||
throw new Error('Exending classes should implement _findAnchor')
|
||||
}
|
||||
|
||||
_createImageAnchor(uid, buffer, width, height, physicalWidthInMeters) {
|
||||
throw new Error('Exending classes should implement _createImageAnchor')
|
||||
}
|
||||
|
||||
/*
|
||||
Find an XRAnchorOffset that is at floor level below the current head pose
|
||||
returns a Promise that resolves either to an AnchorOffset or null if the floor level is unknown
|
||||
|
|
|
@ -70,6 +70,10 @@ export default class XRPresentationFrame {
|
|||
return this._session.reality._findAnchor(normalizedScreenX, normalizedScreenY, this._session.display)
|
||||
}
|
||||
|
||||
createImageAnchor(uid, buffer, width, height, physicalWidthInMeters) {
|
||||
this._session.reality._createImageAnchor(uid, buffer, width, height, physicalWidthInMeters)
|
||||
}
|
||||
|
||||
hitTestNoAnchor(normalizedScreenX, normalizedScreenY){
|
||||
// Array<VRHit> hitTestNoAnchor(float32, float32); // cast a ray to find all plane intersections in the Reality
|
||||
return this._session.reality._hitTestNoAnchor(normalizedScreenX, normalizedScreenY, this._session.display)
|
||||
|
|
|
@ -640,10 +640,16 @@ export default class ARKitWrapper extends EventHandlerBase {
|
|||
* Supply the image in an ArrayBuffer, typedArray or ImageData
|
||||
* width and height are in meters
|
||||
*/
|
||||
createImageAnchor(uid, buffer, width, height) {
|
||||
createImageAnchor(uid, buffer, width, height, physicalWidthInMeters) {
|
||||
var b64 = base64.encode(buffer);
|
||||
|
||||
// something like addAnchor?
|
||||
window.webkit.messageHandlers.addImageAnchor.postMessage({
|
||||
uid: uid,
|
||||
buffer: b64,
|
||||
imageWidth: width,
|
||||
imageHeight: height,
|
||||
physicalWidth: physicalWidthInMeters
|
||||
})
|
||||
}
|
||||
|
||||
/*
|
||||
|
@ -1223,6 +1229,16 @@ ARKitWrapper.AR_TRACKING_CHANGED = 'ar_tracking_changed'
|
|||
ARKitWrapper.COMPUTER_VISION_DATA = 'cv_data'
|
||||
ARKitWrapper.USER_GRANTED_COMPUTER_VISION_DATA = 'user-granted-cv-data'
|
||||
|
||||
// ARKit Detection Image Orientations
|
||||
ARKitWrapper.ORIENTATION_UP = 1 // 0th row at top, 0th column on left - default orientation
|
||||
ARKitWrapper.ORIENTATION_UP_MIRRORED = 2 // 0th row at top, 0th column on right - horizontal flip
|
||||
ARKitWrapper.ORIENTATION_DOWN = 3 // 0th row at bottom, 0th column on right - 180 deg rotation
|
||||
ARKitWrapper.ORIENTATION_DOWN_MIRRORED = 4 // 0th row at bottom, 0th column on left - vertical flip
|
||||
ARKitWrapper.ORIENTATION_LEFT_MIRRORED = 5 // 0th row on left, 0th column at top
|
||||
ARKitWrapper.ORIENTATION_RIGHT = 6 // 0th row on right, 0th column at top - 90 deg CW
|
||||
ARKitWrapper.ORIENTATION_RIGHT_MIRRORED = 7 // 0th row on right, 0th column on bottom
|
||||
ARKitWrapper.ORIENTATION_LEFT = 8 // 0th row on left, 0th column at bottom - 90 deg CCW
|
||||
|
||||
// hit test types
|
||||
ARKitWrapper.HIT_TEST_TYPE_FEATURE_POINT = 1
|
||||
ARKitWrapper.HIT_TEST_TYPE_ESTIMATED_HORIZONTAL_PLANE = 2
|
||||
|
|
|
@ -470,6 +470,12 @@ export default class CameraReality extends Reality {
|
|||
})
|
||||
}
|
||||
|
||||
_createImageAnchor(uid, buffer, width, height, physicalWidthInMeters) {
|
||||
if (this._arKitWrapper) {
|
||||
this._arKitWrapper.createImageAnchor(uid, buffer, width, height, physicalWidthInMeters)
|
||||
}
|
||||
}
|
||||
|
||||
_removeAnchor(uid){
|
||||
if(this._arKitWrapper) {
|
||||
this._arKitWrapper.removeAnchor(uid)
|
||||
|
|
Загрузка…
Ссылка в новой задаче