// Gist by @mattiamanzati, created August 27, 2018
// https://gist.github.com/mattiamanzati/b680d1c326b20b1d01c013df138ace60
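// Minimal TensorFlow.js text-classification example: short strings are encoded
// as fixed-length arrays of character codes, and an embedding -> pooling ->
// dense stack learns to separate the 'a' strings from the 'b' strings.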
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (_) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
var _this = this;
Object.defineProperty(exports, "__esModule", { value: true });
var tf = require("@tensorflow/tfjs");
// Load the binding:
//require('@tensorflow/tfjs-node'); // Use '@tensorflow/tfjs-node-gpu' if running with GPU.
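// Without the native binding, tfjs runs on its pure-JavaScript CPU backend,
// which is slow but more than enough for the toy dataset below.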
// utils
const tuple = (a, b) => [a, b];
// prepare the data: the first element is the label (0 or 1), the second is the raw text
const data = [
    [0, 'aaaaaaaaa'],
    [0, 'aaaa'],
    [1, 'bbbbbbbbb'],
    [1, 'bbbbbb']
];
// normalize the data: encode each string as character codes, then pad (and
// truncate) to a fixed length of 10. Padding uses 0 instead of the original
// -1, since embedding lookup indices must lie in [0, inputDim).
const PAD_LENGTH = 10;
const arrayFill = new Array(PAD_LENGTH).fill(0);
const normalizedData = data.map(item =>
    tuple(item[0], item[1].split('').map(c => c.charCodeAt(0)).concat(arrayFill).slice(0, PAD_LENGTH))
);
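// e.g. 'aaaa' -> [97, 97, 97, 97, 0, 0, 0, 0, 0, 0]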
const xs = tf.tensor(normalizedData.map(i => i[1])); // shape [4, 10]
const ys = tf.tensor(normalizedData.map(i => i[0])); // shape [4]
console.log(xs);
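// console.log prints the Tensor object itself; use xs.print() to see the values.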
// Configs
// LEARNING_RATE is only used if you swap in the explicit optimizer below in
// place of the 'adam' string passed to model.compile().
const LEARNING_RATE = 1e-4;
// Train a simple model:
// const optimizer = tf.train.adam(LEARNING_RATE);
const model = tf.sequential();
// embedding: maps each character code (index < 1000) to a 16-dim vector.
// inputShape is added so that model.summary() can run before the model is fit.
model.add(tf.layers.embedding({ inputDim: 1000, outputDim: 16, inputShape: [10] }));
// average the 10 embedding vectors into a single 16-dim vector
model.add(tf.layers.globalAveragePooling1d({}));
model.add(tf.layers.dense({ units: 16, activation: 'relu' }));
// sigmoid output: probability that the string belongs to class 1
model.add(tf.layers.dense({ units: 1, activation: 'sigmoid' }));
model.summary();
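// Parameter counts: embedding 1000 * 16 = 16,000; hidden dense 16 * 16 + 16 = 272;
// output dense 16 + 1 = 17; the pooling layer has no weights (16,289 total).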
model.compile({ optimizer: 'adam', loss: 'binaryCrossentropy', metrics: ['accuracy'] });
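// binaryCrossentropy pairs with the single sigmoid output above for 0/1 labels.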
// model.fit() is asynchronous; the returned Promise<History> is captured so
// the prediction sketch below can wait for training to finish.
const training = model.fit(xs, ys, {
    epochs: 10,
    // validating on the training set itself is fine for a smoke test
    validationData: [xs, ys],
    callbacks: {
        onEpochEnd: async (epoch, log) => {
            console.log(`Epoch ${epoch}: loss = ${log.loss}`);
        }
    }
});
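// Prediction sketch (an addition, not in the original gist): once training
// resolves, classify an unseen string. The inline array is 'bbbb' encoded with
// the same char-code + zero-padding scheme used to build xs.
training.then(() => {
    const sample = tf.tensor([[98, 98, 98, 98, 0, 0, 0, 0, 0, 0]]);
    model.predict(sample).print(); // sigmoid output near 1 => class 1 ('b')
});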