Skip to content

Instantly share code, notes, and snippets.

@phsultan
Created May 1, 2018 22:24
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save phsultan/f664a7f14774416c2c5691245b13e127 to your computer and use it in GitHub Desktop.
const speech = require('@google-cloud/speech');
const fs = require('fs');
const logger = console;
/**
 * Streams a local LINEAR16 audio file to the Google Cloud Speech API and
 * logs transcription hypotheses (interim and final) as they arrive.
 *
 * The file is read in ~200 ms chunks with a pause between each one to
 * simulate a real-time audio feed.
 */
class GoogleSpeech
{
	constructor()
	{
		logger.debug('constructor()');

		// Google Speech client.
		this._client = new speech.SpeechClient();

		// Google Speech streaming-recognition request.
		this._request =
		{
			config :
			{
				encoding              : 'LINEAR16',
				sampleRateHertz       : 8000,
				enableWordTimeOffsets : true,
				languageCode          : 'fr-FR'
			},
			// 'false' to perform continuous recognition even if the user pauses speaking.
			singleUtterance : false,
			// 'true' to enable tentative hypotheses (interim results).
			interimResults : true
		};

		// Plain audio readable stream (created in start()).
		this._audioStream = null;

		// Google Speech writable stream (created in _start()).
		this._recognizeStream = null;

		// Set by stop() so the 'unpipe' handler does not restart recognition.
		this._closed = false;

		// Simulated elapsed playback time, in milliseconds.
		this._time = 0;
	}

	/**
	 * Start streaming the given audio file to the Speech API.
	 *
	 * @param {String} filename - Path to a LINEAR16 audio file whose sample
	 *   rate matches this._request.config.sampleRateHertz.
	 */
	start(filename)
	{
		logger.debug('start()');

		const chunkMs = 200;

		// Bytes per chunk: sampleRate * 2 bytes per 16-bit sample * chunk
		// duration. Derived from the request config instead of a duplicated
		// magic number so the two cannot drift apart.
		const highWaterMark =
			this._request.config.sampleRateHertz * 2 * (chunkMs / 1000);

		this._audioStream = fs.createReadStream(filename, { highWaterMark });

		this._audioStream
			.on('data', () =>
			{
				// Throttle the read stream so the file is fed in near real time.
				this._audioStream.pause();

				setTimeout(() =>
				{
					this._time += chunkMs;

					if (this._time % 1000 === 0)
						console.log(`[INFO] elapsed time : ${this._time/1000} sec`);

					this._audioStream.resume();
				}, chunkMs);
			});

		this._start();
	}

	/**
	 * Stop streaming and release both streams. Safe to call even if start()
	 * was never called, or to call more than once.
	 */
	stop()
	{
		logger.debug('stop()');

		// Prevent the 'unpipe' handler from recreating the recognize stream.
		this._closed = true;

		if (this._audioStream)
		{
			if (this._recognizeStream)
				this._audioStream.unpipe(this._recognizeStream);

			this._audioStream.destroy();
			this._audioStream = null;
		}

		if (this._recognizeStream)
		{
			this._recognizeStream.end();
			this._recognizeStream = null;
		}
	}

	/**
	 * Create the Speech API writable stream and pipe the audio into it.
	 * Re-invoked from the 'unpipe' handler to restart recognition after a
	 * recoverable API error.
	 */
	_start()
	{
		logger.debug('_start()');

		try
		{
			// Create a writable stream to which pipe the plain audio.
			this._recognizeStream = this._client.streamingRecognize(this._request);
		}
		catch (error)
		{
			logger.error('streamingRecognize() error: [%s]', error.message);

			return;
		}

		this._recognizeStream
			.on('error', (error) =>
			{
				logger.error('streamingRecognize() "error" event [%s]', error.message);
			})
			.on('data', (data) =>
			{
				if (data.error)
				{
					logger.error('streamingRecognize() "data" event error [%s]', data.error);

					// Detach so the 'unpipe' handler can recreate the stream.
					this._audioStream.unpipe(this._recognizeStream);
					this._audioStream.pause();
				}
				else if (data.results && data.results[0] &&
					data.results[0].alternatives && data.results[0].alternatives[0])
				{
					// Guard against responses that carry no results/alternatives.
					logger.debug(data.results[0].alternatives[0].transcript);
				}
			})
			.on('unpipe', () =>
			{
				// Null assignment instead of `delete` (keeps object shape stable).
				this._recognizeStream = null;

				// Restart recognition unless stop() was called.
				if (!this._closed)
					this._start();
			});

		// Pipe the audio stream into the Speech API.
		this._audioStream.pipe(this._recognizeStream);
	}
}
// Transcribe a local test recording. `const`: the binding is never reassigned.
const gSpeech = new GoogleSpeech();

gSpeech.start('test-recording.wav');
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment