@26medias
Last active April 18, 2024 21:04
Google AutoML Vision: Using a trained tfjs model in Node.js to classify an image (full working example)
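
The script requires the two npm packages it imports, plus the exported AutoML model files (model.json and dict.txt, which it reads from the same directory as the script). A typical install (assuming npm):

npm install @tensorflow/tfjs-node @tensorflow/tfjs-automl
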
const fs = require("fs");
const path = require("path");
const tf = require("@tensorflow/tfjs-node");
const automl = require("@tensorflow/tfjs-automl");

// TensorFlow inference helpers
const tfInference = {
  // Load the label dictionary (dict.txt) that sits next to model.json
  loadDictionary: function(modelUrl) {
    const dictUrl = path.join(path.dirname(modelUrl), "dict.txt");
    const text = fs.readFileSync(dictUrl, { encoding: "utf-8" });
    return text.trim().split("\n");
  },
  // Load the graph model and wrap it with the label dictionary
  loadImageClassification: async function(modelUrl) {
    const [model, dict] = await Promise.all([
      tf.loadGraphModel("file://" + modelUrl),
      tfInference.loadDictionary(modelUrl)
    ]);
    return new automl.ImageClassificationModel(model, dict);
  },
  // Read an image file from disk and decode it into a tensor
  decodeImage: function(imgPath) {
    const imgSrc = fs.readFileSync(imgPath);
    const arrByte = Uint8Array.from(Buffer.from(imgSrc));
    return tf.node.decodeImage(arrByte);
  },
  // Load the model once and cache it on the object
  init: async function(modelFilename) {
    tfInference.model = await tfInference.loadImageClassification(modelFilename);
    return tfInference.model;
  },
  // Run classification on a single image file
  // (note: the decoded tensor is not disposed here; call tf.dispose on it if classifying many images)
  classify: async function(model, imgFilename) {
    const decodedImage = tfInference.decodeImage(imgFilename);
    return await model.classify(decodedImage);
  }
};
// Init the model
tfInference.init(path.normalize(__dirname + "/model.json")).then(function(model) {
  // Get the classification for an image
  tfInference.classify(model, path.normalize(__dirname + "/test.jpg")).then(function(response) {
    console.log("Classification: ", response);
  });
});
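
For reference, a minimal sketch of reading the result, assuming classify() resolves to an array of { label, prob } predictions as returned by ImageClassificationModel in @tensorflow/tfjs-automl (the sorting and "best" variable are illustrative, not part of the gist):

// Pick the highest-probability prediction
tfInference.init(path.normalize(__dirname + "/model.json")).then(async function(model) {
  const predictions = await tfInference.classify(model, path.normalize(__dirname + "/test.jpg"));
  const best = predictions.slice().sort(function(a, b) { return b.prob - a.prob; })[0];
  console.log("Top label:", best.label, "with probability", best.prob.toFixed(3));
});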
@ayandebbarman

I am getting the error below. Any idea what's wrong?

The kernel 'undefined' for backend 'cpu' is already registered
The kernel 'undefined' for backend 'cpu' is already registered
2021-05-03 15:11:09.697077: I tensorflow/core/platform/cpu_feature_guard.cc:142] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 FMA
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
/Users/ayan.barman/Documents/Node/Tensorflow/node_modules/@tensorflow/tfjs-automl/dist/index.js:18
export { ImageClassificationModel, loadImageClassification } from './img_classification';
^^^^^^

SyntaxError: Unexpected token 'export'
at Object.compileFunction (node:vm:355:18)
at wrapSafe (node:internal/modules/cjs/loader:1038:15)
at Module._compile (node:internal/modules/cjs/loader:1072:27)
at Object.Module._extensions..js (node:internal/modules/cjs/loader:1137:10)
at Module.load (node:internal/modules/cjs/loader:988:32)
at Function.Module._load (node:internal/modules/cjs/loader:828:14)
at Module.require (node:internal/modules/cjs/loader:1012:19)
at require (node:internal/modules/cjs/helpers:93:18)
at Object.<anonymous> (/Users/ayan.barman/Documents/Node/Tensorflow/tfautoml.js:2:16)
at Module._compile (node:internal/modules/cjs/loader:1108:14)
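
A note on this error (not addressed in the thread): "SyntaxError: Unexpected token 'export'" means Node's CommonJS require() resolved @tensorflow/tfjs-automl to a file written as an ES module. One way to see which file is being loaded (a diagnostic sketch, not a fix):

node -e "console.log(require.resolve('@tensorflow/tfjs-automl'))"

Depending on the installed version, pinning a release that ships a CommonJS build, or bundling/transpiling so the ESM dependency is converted to CommonJS, may resolve it; the exact remedy depends on how that version of the package is published.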

@JoseLuisR-SIA

thanks!
