<!--
  Mirror of https://github.com/google-ar/three.ar.js.git
  (synced 2026-01-25 14:06:43 +00:00)
-->
<!--
/*
 * Copyright 2017 Google Inc. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-->
|
|
<!DOCTYPE html>
<html lang="en">
<head>
  <title>three.ar.js - Surfaces</title>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, user-scalable=no,
  minimum-scale=1.0, maximum-scale=1.0">
  <style>
    body {
      font-family: monospace;
      margin: 0;
      overflow: hidden;
      position: fixed;
      width: 100%;
      height: 100vh;
      -webkit-user-select: none;
      user-select: none;
    }
    #info {
      background-color: rgba(40, 40, 40, 0.4);
      bottom: 0;
      box-sizing: border-box;
      color: rgba(255, 255, 255, 0.7);
      left: 50%;
      line-height: 1.3em;
      padding: 0.75em;
      position: absolute;
      text-align: center;
      transform: translate(-50%, 0);
      width: 100%;
      z-index: 10;
    }
    .divider {
      color: rgba(255, 255, 255, 0.2);
      padding: 0 0.5em;
    }
    #info a {
      text-decoration: none;
      color: white;
    }
    canvas {
      position: absolute;
      top: 0;
      left: 0;
    }
  </style>
</head>
<body>
  <div id="info">
    <a href="https://github.com/google-ar/three.ar.js">three.ar.js</a><span class="divider">|</span>Render detected surfaces.
  </div>
  <script src="../third_party/three.js/three.js"></script>
  <script src="../third_party/three.js/VRControls.js"></script>
  <script src="../dist/three.ar.js"></script>
  <script>
|
|
|
|
var vrDisplay, vrControls, arView;
var canvas, camera, scene, renderer;

/**
 * Look for an AR-capable VRDisplay through the WebVR API via the
 * `getARDisplay()` helper. When one is found, keep a reference to it
 * and boot the demo; otherwise show the stock "unsupported browser"
 * message.
 */
THREE.ARUtils.getARDisplay().then(function (foundDisplay) {
  if (!foundDisplay) {
    THREE.ARUtils.displayUnsupportedMessage();
    return;
  }
  vrDisplay = foundDisplay;
  init();
});
|
|
|
|
/**
 * One-time setup: build the three.js renderer/scene, attach the AR
 * debug overlay, create the ARView (camera-feed background), the
 * AR perspective camera, and the VR pose controls, then start the
 * render loop.
 */
function init() {
  // Renderer with alpha so the camera feed is visible behind the GL
  // canvas; autoClear off because ARView and the scene share the frame.
  renderer = new THREE.WebGLRenderer({ alpha: true });
  renderer.setPixelRatio(window.devicePixelRatio);
  renderer.setSize(window.innerWidth, window.innerHeight);
  renderer.autoClear = false;
  canvas = renderer.domElement;
  document.body.appendChild(canvas);

  scene = new THREE.Scene();

  // Debug overlay, configured to visualize detected planes only.
  var debugPanel = new THREE.ARDebug(vrDisplay, scene, {
    showLastHit: false,
    showPoseStatus: false,
    showPlanes: true,
  });
  document.body.appendChild(debugPanel.getElement());

  // ARView handles rendering the device camera stream behind the
  // three.js scene.
  arView = new THREE.ARView(vrDisplay, renderer);

  // ARPerspectiveCamera works like THREE.PerspectiveCamera, except on
  // an AR-capable browser it adopts the device-supplied projection
  // matrix so the virtual frustum matches the physical camera.
  camera = new THREE.ARPerspectiveCamera(
    vrDisplay,
    60,
    window.innerWidth / window.innerHeight,
    vrDisplay.depthNear,
    vrDisplay.depthFar
  );

  // VRControls applies the device's orientation/position to the
  // camera, keeping the real and virtual worlds in sync.
  vrControls = new THREE.VRControls(camera);

  window.addEventListener('resize', onWindowResize, false);

  // Kick off the render loop.
  update();
}
|
|
|
|
/**
 * Per-frame render loop: draws the camera feed, syncs the camera's
 * projection and pose with the device, renders the virtual scene on
 * top, then schedules itself for the next frame.
 */
function update() {
  // Render the device's camera stream first; doing it at the top of
  // the frame keeps the pose used below synchronized with this frame.
  arView.render();

  // Refresh the projection matrix in case the device updated the
  // near/far planes since the last frame.
  camera.updateProjectionMatrix();

  // Apply the device's latest orientation/position to the camera.
  vrControls.update();

  // autoClear was disabled in init, so clear only the depth buffer:
  // the scene draws over the camera feed without erasing it.
  renderer.clearDepth();
  renderer.render(scene, camera);

  // Schedule this function to run again on the next frame.
  requestAnimationFrame(update);
}
|
|
|
|
/**
 * Resize handler: keep the camera's aspect ratio and the renderer's
 * drawing-buffer size in step with the window, and refresh the
 * projection matrix so the device-provided projection stays current.
 */
function onWindowResize () {
  var width = window.innerWidth;
  var height = window.innerHeight;
  camera.aspect = width / height;
  camera.updateProjectionMatrix();
  renderer.setSize(width, height);
}
|
|
</script>
</body>
</html>
|