Dialogflow - Streaming
// `sessionClient` (a Dialogflow SessionsClient) and `request` (the initial
// StreamingDetectIntentRequest carrying the session path and audio config)
// are assumed to be created elsewhere; see the setup sketch below.
const util = require('util');
const { Transform } = require('stream');
// Promisify pump so the `await` below actually waits for the pipeline to finish.
const pump = util.promisify(require('pump'));

// 1) Stream an audio source to Dialogflow and hand the final response to `cb`.
async function detectIntentStream(audio, cb) {
  // 2) Open the bidirectional streaming call.
  const stream = sessionClient
    .streamingDetectIntent()
    .on('data', data => {
      // 3) Interim recognition results arrive as `recognitionResult`;
      //    the last message carries the detected intent instead.
      if (data.recognitionResult) {
        console.log(
          `Intermediate transcript: ${data.recognitionResult.transcript}`
        );
      } else {
        console.log('Detected intent:');
        cb(data);
      }
    })
    // 4) Log stream errors and the end of the response stream.
    .on('error', e => {
      console.log(e);
    })
    .on('end', () => {
      console.log('on end');
    });

  // 5) The first request contains only the query configuration, no audio.
  stream.write(request);

  // 6) Pipe the audio source into the request stream,
  // 7) wrapping each raw chunk in a StreamingDetectIntentRequest that also
  //    asks for the response audio as LINEAR16.
  await pump(
    audio,
    new Transform({
      objectMode: true,
      transform: (obj, _, next) => {
        next(null, {
          inputAudio: obj,
          outputAudioConfig: {
            audioEncoding: 'OUTPUT_AUDIO_ENCODING_LINEAR_16',
          },
        });
      },
    }),
    stream
  );
}
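
For completeness, a minimal setup and usage sketch (not part of the original snippet): it assumes the @google-cloud/dialogflow client library, placeholder project and session IDs, and a local 16 kHz LINEAR16 audio file.

// Minimal setup sketch. Assumptions: @google-cloud/dialogflow client library,
// placeholder project/session IDs, and a 16 kHz LINEAR16 audio file.
const fs = require('fs');
const dialogflow = require('@google-cloud/dialogflow');

const sessionClient = new dialogflow.SessionsClient();
const sessionPath = sessionClient.projectAgentSessionPath(
  'my-project-id', // placeholder GCP project ID
  'my-session-id'  // placeholder session ID
);

// Initial StreamingDetectIntentRequest: query configuration only, no audio.
const request = {
  session: sessionPath,
  queryInput: {
    audioConfig: {
      audioEncoding: 'AUDIO_ENCODING_LINEAR_16',
      sampleRateHertz: 16000,
      languageCode: 'en-US',
    },
  },
  outputAudioConfig: {
    audioEncoding: 'OUTPUT_AUDIO_ENCODING_LINEAR_16',
  },
};

// Stream a local file and print the fulfillment text of the detected intent.
detectIntentStream(fs.createReadStream('./audio.wav'), data => {
  console.log(data.queryResult.fulfillmentText);
}).catch(console.error);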