mirror of https://github.com/google-ar/three.ar.js.git
synced 2026-01-25 14:06:43 +00:00
225 lines, 6.6 KiB, HTML
<!--
/*
 * Copyright 2017 Google Inc. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-->
<!DOCTYPE html>
<html lang="en">
<head>
  <title>three.ar.js - Spawn At Surface</title>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
  <style>
    /* Full-viewport, non-scrolling page so the AR canvas fills the screen. */
    body {
      font-family: Monospace;
      margin: 0px;
      overflow: hidden;
      position: fixed;
      width: 100%;
      height: 100%;
      -webkit-user-select: none;
      user-select: none;
    }
    /* Caption bar pinned to the bottom, above the canvas. */
    #info {
      position: absolute;
      bottom: 10px;
      z-index: 100;
      display: block;
      width: 100%;
      text-align: center;
      color: #fff;
      background-color: #000;
    }
    a {
      color: #ff0077;
    }
    /* Canvas anchored to the top-left corner of the viewport. */
    canvas {
      position: absolute;
      top: 0;
      left: 0;
    }
  </style>
</head>
<body>
  <div id="info">
    <a href="https://github.com/google-ar/three.ar.js" target="_blank">three.ar.js</a> - Spawn At Surface
  </div>
  <script src="../third_party/three.js/three.js"></script>
  <script src="../third_party/three.js/VRControls.js"></script>
  <script src="../dist/three.ar.js"></script>
  <script>
// AR/WebVR handles, assigned during init().
var vrDisplay;
var vrControls;
var arView;

// three.js rendering objects, assigned during init().
var canvas;
var camera;
var scene;
var renderer;
var cube;

// One THREE.Color per cube vertex; each corner of the box picks its
// color from this palette by vertex index.
var colors = [
  0xffffff,
  0xffff00,
  0xff00ff,
  0xff0000,
  0x00ffff,
  0x00ff00,
  0x0000ff,
  0x000000
].map(function (hex) {
  return new THREE.Color(hex);
});
/**
 * Ask the WebVR API (via the `getARDisplay()` utility) for an
 * AR-capable VRDisplay. When one exists, stash it and boot the demo;
 * otherwise show the "unsupported browser" message.
 */
THREE.ARUtils.getARDisplay().then(function (display) {
  if (!display) {
    THREE.ARUtils.displayUnsupportedMessage();
    return;
  }
  vrDisplay = display;
  init();
});
function init() {
  // Rendering setup: a transparent WebGL canvas so the device camera
  // feed (drawn by ARView) shows through behind the virtual scene.
  renderer = new THREE.WebGLRenderer({ alpha: true });
  renderer.setPixelRatio(window.devicePixelRatio);
  renderer.setSize(window.innerWidth, window.innerHeight);
  renderer.autoClear = false;
  canvas = renderer.domElement;
  document.body.appendChild(canvas);
  scene = new THREE.Scene();

  // ARView draws the device's camera stream behind the three.js scene.
  arView = new THREE.ARView(vrDisplay, renderer);

  // ARPerspectiveCamera works like THREE.PerspectiveCamera, except on an
  // AR-capable browser it takes its projection matrix from the device,
  // so the virtual frustum (depth planes, field of view) matches the
  // physical camera.
  camera = new THREE.ARPerspectiveCamera(
    vrDisplay,
    60,
    window.innerWidth / window.innerHeight,
    vrDisplay.depthNear,
    vrDisplay.depthFar
  );

  // VRControls (three.js utility) applies the device's pose to the
  // camera each frame, keeping real and virtual worlds in sync.
  vrControls = new THREE.VRControls(camera);

  // Build a small cube whose corners are tinted from the `colors`
  // palette: each face stores three vertex colors, one per corner
  // ('a', 'b', 'c'), looked up by that corner's vertex index.
  var boxGeometry = new THREE.BoxGeometry(0.05, 0.05, 0.05);
  var cornerKeys = ['a', 'b', 'c'];
  boxGeometry.faces.forEach(function (face) {
    cornerKeys.forEach(function (key, slot) {
      face.vertexColors[slot] = colors[face[key]];
    });
  });
  var cubeMaterial = new THREE.MeshBasicMaterial({ vertexColors: THREE.VertexColors });
  cube = new THREE.Mesh(boxGeometry, cubeMaterial);
  scene.add(cube);

  // Event wiring: keep the camera/canvas sized to the window, and
  // reposition the cube on touch.
  window.addEventListener('resize', onWindowResize, false);
  canvas.addEventListener('touchstart', onClick, false);

  // Start the render loop.
  update();
}
/**
 * Per-frame render loop. Order matters: pose update first, then the
 * camera-stream backdrop, then the virtual scene on top.
 */
function update() {
  // Sync the camera with the device's current pose.
  vrControls.update();

  // Draw the camera stream behind everything.
  arView.render();

  // Clear only depth (color holds the camera image), then draw the
  // virtual scene over it.
  renderer.clearDepth();
  renderer.render(scene, camera);

  // Schedule the next frame.
  requestAnimationFrame(update);
}
/**
 * Resize handler: refresh the camera's aspect ratio (and, via
 * `updateProjectionMatrix`, pull the latest device-provided projection
 * matrix), then resize the renderer to fill the window.
 */
function onWindowResize () {
  var width = window.innerWidth;
  var height = window.innerHeight;
  camera.aspect = width / height;
  camera.updateProjectionMatrix();
  renderer.setSize(width, height);
}
/**
 * Touch handler: cast a ray from the touch point into the real world
 * and, if a surface hit comes back, move the cube there.
 */
function onClick (e) {
  // Bail out if the event carries no touch point.
  // TODO: is this necessary?
  var touch = e.touches[0];
  if (!touch) {
    return;
  }

  // Normalize the touch position to [0, 1] screen coordinates.
  var x = touch.pageX / window.innerWidth;
  var y = touch.pageY / window.innerHeight;

  // `hitTest` fires a ray toward real-world surfaces and returns an
  // array of candidate hits (possibly empty or absent).
  var hits = vrDisplay.hitTest(x, y);
  if (!hits || !hits.length) {
    return;
  }

  // Take the first (closest) hit and snap the cube to it:
  //   true -> also apply the hit's orientation,
  //   1    -> easing factor; move all the way there immediately.
  THREE.ARUtils.placeObjectAtHit(cube, hits[0], true, 1);
}
  </script>
</body>
</html>