Skip to content

Instantly share code, notes, and snippets.

@minakhan01
Last active August 29, 2015 14:11
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save minakhan01/1765ccd15fb04d80d052 to your computer and use it in GitHub Desktop.
[wearscript] glass_basic.html
<html style="width:100%; height:100%; overflow:hidden">
<head>
<!-- You can include external scripts here like so... -->
<!--<script src="https://cdnjs.cloudflare.com/ajax/libs/zepto/1.0/zepto.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/underscore.js/1.5.2/underscore-min.js"></script>-->
</head>
<body style="width:100%; height:100%; overflow:hidden; margin:0" bgcolor="#000">
<canvas id="canvas" width="640" height="360" style="display:block"></canvas>
<script>
// Callback invoked by WS.serverConnect (see main()) once the WearScript
// server connection is established. Greets the user, starts data logging,
// and registers gesture + speech-recognition handlers.
function server() {
  WS.log('Welcome to WearScript');   // log line visible in the WearScript console
  WS.say('Welcome to WearScript');   // text-to-speech greeting
  WS.sound('SUCCESS');               // fixed: statement was missing its semicolon

  // // Changes canvas color with head rotation
  // WS.sensorOn('orientation', .15, function (data) {
  //   ctx.fillStyle = 'hsl(' + data['values'][0] + ', 90%, 50%)';
  //   ctx.fillRect(0, 0, 640, 360);
  // });
  // // Stream several sensors (can view in the Sensors tab)
  // var sensors = ['gps', 'accelerometer', 'magneticField', 'gyroscope',
  //                'light', 'gravity', 'linearAcceleration', 'rotationVector'];
  // for (var i = 0; i < sensors.length; i++)
  //   WS.sensorOn(sensors[i], .15);

  // NOTE(review): presumably (logToDevice, logToServer, periodSeconds) —
  // confirm against the WearScript API docs.
  WS.dataLog(false, true, .15);

  // Possible gestures: LONG_PRESS, SWIPE_DOWN, SWIPE_LEFT, SWIPE_RIGHT, TAP, TWO_TAP
  // THREE_LONG_PRESS, THREE_TAP, TWO_LONG_PRESS, TWO_SWIPE_RIGHT, TWO_SWIPE_UP,
  WS.gestureCallback('onGestureTAP', function () {
    // Stream camera frames (can view in the Images tab)
    WS.cameraOn(0.25);
  });

  // Named callback: speaks the recognized text back and logs it.
  var speech_callback = function (data) {
    WS.say('you said ' + data);
    function write() {
      // closure example: data variable is available to inner function
      WS.log('speech: ' + data);
    }
    write();
  };  // fixed: assignment statement was missing its semicolon

  WS.gestureCallback('onGestureTWO_TAP', function () {
    // (removed unused local `in_gesture` — it was assigned but never read)
    WS.speechRecognize('Say Something', speech_callback);
  });

  WS.gestureCallback('onGestureLONG_PRESS', function () {
    WS.cameraVideo();
  });

  // Below this are more examples, uncomment to use them
  //WS.liveCardCreate(false, .2);
  /*
  var tree = new WS.Cards();
  tree.add('Body text', 'Footer text', function () {WS.say('selected')}, function () {WS.say('tapped')}, 'Menu0', function () {WS.say('menu0')}, 'Menu1', function () {WS.say('menu1')});
  tree.add('Body text', 'Footer text', (new WS.Cards()).add('Child0', '0').add('Child1', '1'));
  WS.cardTree(tree);
  WS.displayCardTree();
  */
  /*
  WS.speechRecognize('Say Something', function (data) {
    WS.log('speech: ' + data);
    WS.say('you said ' + data);
  });
  */
  //WS.cameraPhoto();
  //WS.cameraVideo();
  //WS.cameraOff();
  //WS.shutdown();
}
// Boot sequence: abort on an unsupported WearScript API version, grab the
// canvas 2D context, then connect to the server — server() runs on success.
function main() {
  if (WS.scriptVersion(1)) return;
  // `ctx` was an implicit global (no `var`); the commented sensor example in
  // server() relies on it being global, so make the global explicit instead
  // of scoping it locally.
  window.ctx = document.getElementById('canvas').getContext('2d');
  WS.serverConnect('{{WSUrl}}', server);
}
window.onload = main;
</script>
</body>
</html>
{"name":"Example"}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment