<!--
/*
* Copyright 2017 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-->
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.ar.js - Anchors</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no,
minimum-scale=1.0, maximum-scale=1.0">
<style>
body {
  font-family: monospace;
  margin: 0;
  overflow: hidden;
  position: fixed;
  width: 100%;
  height: 100vh;
  -webkit-user-select: none;
  user-select: none;
}
#info {
  position: absolute;
  left: 50%;
  bottom: 0;
  transform: translate(-50%, 0);
  margin: 1em;
  z-index: 10;
  display: block;
  width: 100%;
  line-height: 2em;
  text-align: center;
}
#info a,
#info .title {
  padding: 0.4em 0.6em;
  border-radius: 0.1em;
}
#info a {
  color: rgba(255, 255, 255, 0.8);
  background-color: rgba(40, 40, 40, 0.6);
  font-weight: bold;
  text-decoration: none;
}
.title {
  color: rgba(255, 255, 255, 0.9);
  background-color: rgba(40, 40, 40, 0.4);
  margin-left: 0.2em;
}
canvas {
  position: absolute;
  top: 0;
  left: 0;
}
</style>
</head>
<body>
<script src="js/three.js"></script>
<script src="js/three.ar.js"></script>
<script src="js/VRControls.js"></script>
<script src="js/curves/NURBSCurve.js"></script>
<script src="js/curves/NURBSSurface.js"></script>
<script src="js/curves/NURBSUtils.js"></script>
<script src="js/NurbsRibbon.js"></script>
<script src="js/NurbsRibbonGroup.js"></script>
<script src="js/THREE.MeshLine.js"></script>
<script src="js/jsQR.js"></script>
<script>
var vrDisplay;
var vrFrameData;
var vrControls;
var arView;
var canvas;
var camera;
var scene;
var renderer;
var cube;
var cubes = [];
var anchorManager;
var strokeTextures = [];
var bufferScene, bufferTexture;
var CUBE_SIZE_IN_METERS = 0.18;
var colors = [
  new THREE.Color(0xff66a8),
  new THREE.Color(0xdf2dff),
  new THREE.Color(0x6d1eff),
  new THREE.Color(0xa72dff),
  new THREE.Color(0xf0b5b3),
  new THREE.Color(0x00ff00),
  new THREE.Color(0x000000),
  new THREE.Color(0xffffff)
];
var ribbons;
var filename = 'shoeCurves2.txt';
/**
 * Use the `getARDisplay()` utility, which leverages the WebVR API,
 * to check for an AR-capable WebVR VRDisplay. If one is found, the
 * promise resolves with a valid display; otherwise we show the
 * unsupported-browser message.
 */
THREE.ARUtils.getARDisplay().then(function (display) {
  if (display) {
    vrFrameData = new VRFrameData();
    vrDisplay = display;
    init();
  } else {
    THREE.ARUtils.displayUnsupportedMessage();
  }
});
function init() {
  // Turn on the debugging panel
  // var arDebug = new THREE.ARDebug(vrDisplay);
  // document.body.appendChild(arDebug.getElement());

  // Set up the three.js rendering environment
  renderer = new THREE.WebGLRenderer({ alpha: true });
  renderer.setPixelRatio(window.devicePixelRatio);
  console.log('setRenderer size', window.innerWidth, window.innerHeight);
  renderer.setSize(window.innerWidth, window.innerHeight);
  renderer.autoClear = false;
  canvas = renderer.domElement;
  document.body.appendChild(canvas);
  scene = new THREE.Scene();
  ribbons = new NurbsRibbonGroup();
  // Note: the original line here was the bare expression `ribbons.colors`,
  // which has no effect; assigning the palette is the assumed intent.
  ribbons.colors = colors;
  // Create a different scene to hold our buffer objects
  bufferScene = new THREE.Scene();
  // Create the texture that will store our result
  bufferTexture = new THREE.WebGLRenderTarget(window.innerWidth, window.innerHeight, {
    minFilter: THREE.LinearFilter,
    magFilter: THREE.NearestFilter
  });
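  // (The render target keeps the buffer scene's output on the GPU;
  // minFilter/magFilter control how that texture is sampled when it is
  // scaled down or up, respectively.)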
  var loader = new THREE.TextureLoader();
  var texture1 = loader.load("textures/stroke1.png");
  var texture2 = loader.load("textures/stroke2.png");
  var texture3 = loader.load("textures/stroke3.png");
  strokeTextures.push(texture1);
  strokeTextures.push(texture2);
  strokeTextures.push(texture3);
  ribbons.init(strokeTextures);
  loadSplinePoints();

  // Create the ARView, the object that handles rendering
  // the camera stream behind the three.js scene
  arView = new THREE.ARView(vrDisplay, renderer);

  // The ARPerspectiveCamera is very similar to THREE.PerspectiveCamera,
  // except when using an AR-capable browser, the camera uses
  // the projection matrix provided by the device, so that the
  // perspective camera's depth planes and field of view match
  // the physical camera on the device.
  camera = new THREE.ARPerspectiveCamera(
    vrDisplay,
    60,
    window.innerWidth / window.innerHeight,
    vrDisplay.depthNear,
    vrDisplay.depthFar
  );

  // VRControls is a utility from three.js that applies the device's
  // orientation/position to the perspective camera, keeping our
  // real world and virtual world in sync.
  vrControls = new THREE.VRControls(camera);

  // Create the cube geometry that we'll copy and place in the
  // scene when the user clicks the screen
  var geometry = new THREE.BoxGeometry(0.05, 0.05, 0.05);
  var faceIndices = ['a', 'b', 'c'];
  for (var i = 0; i < geometry.faces.length; i++) {
    var f = geometry.faces[i];
    for (var j = 0; j < 3; j++) {
      var vertexIndex = f[faceIndices[j]];
      f.vertexColors[j] = colors[vertexIndex];
    }
  }
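  // (Each of the box's eight corners gets the palette color with the same
  // index, so every face is shaded by its three corner colors.)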
  var material = new THREE.MeshBasicMaterial({ vertexColors: THREE.VertexColors });
  cube = new THREE.Mesh(geometry, material);

  // Bind our event handlers
  window.addEventListener('resize', onWindowResize, false);
  window.addEventListener('touchstart', onClick, false);
  anchorManager = new THREE.ARAnchorManager(vrDisplay);
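  // The anchor manager keeps registered meshes pinned to their real-world
  // positions as the underlying tracking refines its world understanding;
  // onClick below shows the pairing: anchorManager.add(mesh) when placing
  // an object and anchorManager.remove(mesh) before removing it from the scene.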
  // Kick off the render loop!
  update();
}
/**
* The render loop, called once per frame. Handles updating
* our scene and rendering.
*/
function update() {
  // Clears color from the frame before rendering the camera (arView) or scene.
  renderer.clearColor();
  // Render the device's camera stream on screen first of all;
  // this keeps the rendered pose synchronized with the matching camera frame.
  arView.render();

  if (ribbons.isInited) {
    ribbons.update();
  }

  // Update our camera projection matrix in the event that
  // the near or far planes have updated
  camera.updateProjectionMatrix();

  // From the WebVR API, populate `vrFrameData` with
  // updated information for the frame
  vrDisplay.getFrameData(vrFrameData);

  // Render onto our off-screen texture
  renderer.render(bufferScene, camera, bufferTexture);
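  // (This off-screen pass is presumably meant to feed the QR-scanning
  // attempt in onClick below, which is currently commented out.)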
  // Update our perspective camera's positioning
  vrControls.update();

  // Render our three.js virtual scene
  renderer.clearDepth();
  renderer.render(scene, camera);

  // Kick off the requestAnimationFrame to call this function
  // on the next frame
  requestAnimationFrame(update);
}
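/*
 * Note: the stock three.ar.js examples typically schedule the loop with
 * vrDisplay.requestAnimationFrame(update) so that frames stay aligned with
 * the AR display's refresh; window.requestAnimationFrame, as used above,
 * also works on the ARCore/ARKit WebVR browsers this demo targets.
 */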
function loadSplinePoints() {
  var scalar = 0.0222;
  var manager = new THREE.LoadingManager();
  // manager.onLoad = function () {}
  var loader = new THREE.FileLoader(manager);
  // loader.crossOrigin = '';

  // Load the curve text file and parse it into ribbon control points.
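  // Expected file format (inferred from the parsing below): curves are
  // separated by blank lines, and each point is one "x, y, z" line, e.g.
  //
  //   1.0, 2.0, 3.0
  //   1.5, 2.1, 3.2
  //
  //   4.0, 0.5, 1.0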
  loader.load(
    // resource URL
    'curve-data/' + filename,
    // onLoad callback
    function (data) {
      const splitCurves = data.split("\n\n");
      splitCurves.forEach(function (curveElement, curveIndex) {
        var curvePts = []; // new array to be added to the list of arrays
        console.log("newline");
        const splitString = curveElement.split("\n");
        splitString.forEach(function (element) {
          const pt = element.split(", ");
          const vec = new THREE.Vector3(parseFloat(pt[0]) * scalar, parseFloat(pt[1]) * scalar, parseFloat(pt[2]) * scalar);
          // Skip malformed/blank lines: parseFloat("") yields NaN, and
          // `NaN != 0` is true, so the original `vec.x != 0` guard let
          // NaN points through.
          if (!isNaN(vec.x) && vec.x !== 0) {
            // loadedCurvePts.push(vec);
            curvePts.push(vec);
          }
        });
        if (curvePts.length > 0) {
          // shoeCurves.push(curvePts.slice()); // slice copies by value rather than reference
          ribbons.ribbons[curveIndex].loadedCurvePts = curvePts.slice();
          console.log("ribbons data size: " + ribbons.ribbons[curveIndex].loadedCurvePts.length);
        }
      });
    },
    // onProgress callback
    function (xhr) {
      console.log((xhr.loaded / xhr.total * 100) + '% loaded');
    },
    // onError callback
    function (err) {
      console.error('An error happened', err);
    }
  );
}
/**
 * On window resize, update the perspective camera's aspect ratio,
 * and call `updateProjectionMatrix` so that we pick up the latest
 * projection matrix provided by the device.
 */
function onWindowResize() {
  console.log('setRenderer size', window.innerWidth, window.innerHeight);
  camera.aspect = window.innerWidth / window.innerHeight;
  camera.updateProjectionMatrix();
  renderer.setSize(window.innerWidth, window.innerHeight);
}
/**
 * On tap, trigger drawing the ribbon curves from the loaded data.
 * A two-or-more-finger touch instead removes the oldest placed cube
 * and its anchor. (The original cube-placement code is kept below,
 * commented out.)
 */
function onClick(e) {
  // const code = jsQR(bufferTexture, window.innerWidth, window.innerHeight);
  // if (code) {
  //   console.log("Found QR code", code);
  // } else {
  //   console.log("nope", code);
  // }
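  // A sketch of how jsQR could actually be fed here: jsQR expects a flat
  // RGBA Uint8ClampedArray rather than a WebGLRenderTarget, so the pixels
  // must first be read back from the GPU. Left disabled like the block
  // above; it assumes `bufferTexture` contains the camera image.
  // var w = bufferTexture.width, h = bufferTexture.height;
  // var pixels = new Uint8Array(w * h * 4);
  // renderer.readRenderTargetPixels(bufferTexture, 0, 0, w, h, pixels);
  // var qr = jsQR(new Uint8ClampedArray(pixels.buffer), w, h);
  // if (qr) {
  //   console.log("Found QR code", qr.data);
  // }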
  ribbons.TriggerDrawCurvesFromData();

  // If the user touched with 2 or more fingers, remove the oldest placed
  // cube and its anchor (cubes[0] is the first one added).
  if (cubes.length > 0 && e.touches.length > 1) {
    anchorManager.remove(cubes[0]);
    scene.remove(cubes[0]);
    cubes.splice(0, 1);
    return;
  }
  // Fetch the pose data from the current frame
  var pose = vrFrameData.pose;

  // Convert the pose orientation and position into
  // THREE.Quaternion and THREE.Vector3 respectively
  var ori = new THREE.Quaternion(
    pose.orientation[0],
    pose.orientation[1],
    pose.orientation[2],
    pose.orientation[3]
  );
  var pos = new THREE.Vector3(
    pose.position[0],
    pose.position[1],
    pose.position[2]
  );

  var dirMtx = new THREE.Matrix4();
  dirMtx.makeRotationFromQuaternion(ori);
  var push = new THREE.Vector3(0, 0, -1.0);
  push.transformDirection(dirMtx);
  pos.addScaledVector(push, 0.125);
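  // (The three lines above rotate the camera-space forward vector (0, 0, -1)
  // into world space and offset the spawn position 0.125 m along it, so a
  // placed object appears slightly in front of the camera.)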
  // Clone our cube object and place it at the camera's current position.
  // (Kept for reference; disabled in favor of the ribbon drawing above.)
  // var cubeClone = cube.clone();
  // scene.add(cubeClone);
  // cubeClone.position.copy(pos);
  // cubeClone.quaternion.copy(ori);
  // cubes.push(cubeClone);
  // anchorManager.add(cubeClone);
}
</script>
</body>
</html>