// Gist by @tandevmode, created December 9, 2022 08:53
const functions = require("firebase-functions");
const region = "asia-northeast1"
const axios = require('axios');
const FormData = require('form-data');
const speech = require('@google-cloud/speech');
// For file handling
const path = require("path");
const os = require("os");
const fs = require("fs");
// Instantiates a client
const client = new speech.SpeechClient();
// LINE API
const LINE_MESSAGING_API = "https://api.line.me/v2/bot";
const LINE_CONTENT_API = "https://api-data.line.me/v2/bot/message";
const LINE_HEADER = {
  "Content-Type": "application/json",
  Authorization: "Bearer XXXXX"
};
// ZAMZAR API for converting the audio file to .wav
const ZAMZAR_API_KEY = "YYYYYY";
const ZAMZAR_CREATE_JOB_API = "https://api.zamzar.com/v1/jobs";
const ZAMZAR_DOWNLOAD_API = "https://api.zamzar.com/v1/files";
const ZAMZAR_HEADER = {
  "Content-Type": "multipart/form-data"
};
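// Webhook entry point for LINE. For incoming audio messages it downloads the
// audio content, converts it to .wav via Zamzar, transcribes it with Google
// Speech-to-Text, and replies with a sticker plus the transcription text.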
exports.LineWebhook = functions.region(region).https.onRequest(async (req, res) => {
  if (req.method === "POST") {
    let event = req.body.events[0];
    console.log("Webhook: " + JSON.stringify(event));
    if (event === undefined) {
      return res.end();
    }
    if (event.type === 'message' && event.message.type === 'audio') {
      // Fetch the audio file that the user sent through LINE
      const url = `${LINE_CONTENT_API}/${event.message.id}/content`;
      const audioFile = await axios({
        method: "get",
        headers: LINE_HEADER,
        url: url,
        responseType: "arraybuffer"
      });
      // Save the user's audio file locally; audio sent from iOS arrives as .m4a
      const filenameTimestamp = event.timestamp;
      const m4aLocalFile = path.join(os.tmpdir(), filenameTimestamp + ".m4a");
      fs.writeFileSync(m4aLocalFile, audioFile.data);
      // Convert the audio file from .m4a to .wav
      const wavFile = await convertAndDownloadWAVFile(m4aLocalFile);
      // Save the converted .wav file locally
      const wavLocalFile = path.join(os.tmpdir(), filenameTimestamp + ".wav");
      fs.writeFileSync(wavLocalFile, wavFile.data);
      // Call the Google Speech-to-Text API
      const resultText = await transcribeSpeech(wavLocalFile);
      await reply(event.replyToken,
        [{
          "type": "sticker",
          "packageId": "6359",
          "stickerId": "11069861"
        },
        {
          "type": "text",
          "text": "Speech-to-text Result: " + resultText
        }]
      );
    }
  }
  return res.end();
});
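// Create a Zamzar conversion job for the given local audio file, targeting .wav.
// Zamzar uses HTTP Basic auth with the API key as the username and an empty password.
// Note: depending on the axios/form-data versions, the multipart boundary may need to
// come from bodyFormData.getHeaders() rather than the fixed ZAMZAR_HEADER.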
const createConvertJobToWAV = async (audioLocalFile) => {
  const bodyFormData = new FormData();
  bodyFormData.append('target_format', 'wav');
  bodyFormData.append('source_file', fs.createReadStream(audioLocalFile));
  return await axios({
    method: "post",
    url: ZAMZAR_CREATE_JOB_API,
    headers: ZAMZAR_HEADER,
    data: bodyFormData,
    auth: {
      username: ZAMZAR_API_KEY
    }
  });
}
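// Fetch the status of a Zamzar conversion job by its ID. Once the job has
// finished, the response body lists the converted file(s) under target_files.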
const isConvertJobSuccess = async (jobId) => {
  return await axios({
    method: "get",
    url: `${ZAMZAR_CREATE_JOB_API}/${jobId}`,
    auth: {
      username: ZAMZAR_API_KEY
    }
  });
}
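// Download the converted .wav content from Zamzar by target file ID as a binary buffer.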
const downloadWAVFile = async (targetFileId) => {
  return await axios({
    method: "get",
    url: `${ZAMZAR_DOWNLOAD_API}/${targetFileId}/content`,
    responseType: "arraybuffer",
    auth: {
      username: ZAMZAR_API_KEY
    }
  });
}
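// Full conversion flow: create the Zamzar job, wait a fixed delay for it to
// finish, read the job result, then download the converted .wav file.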
const convertAndDownloadWAVFile = async (m4aLocalFile) => {
  const resultFromConvertFile = await createConvertJobToWAV(m4aLocalFile);
  // The conversion runs as a job and does not return a result immediately,
  // so sleep the chatbot for about 5 seconds before fetching the result.
  await new Promise(r => setTimeout(r, 5000));
  const resultFromChecking = await isConvertJobSuccess(resultFromConvertFile.data.id);
  return await downloadWAVFile(resultFromChecking.data.target_files[0].id);
}
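// Transcribe a local .wav file with the Google Cloud Speech-to-Text recognize API.
// The config assumes 16 kHz LINEAR16 audio in Thai (th-TH); adjust the sample rate
// if the converted file differs.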
const transcribeSpeech = async (wavFilename) => {
  const audio = {
    content: fs.readFileSync(wavFilename).toString('base64'),
  };
  // The audio file's encoding, sample rate in hertz, and BCP-47 language code
  const config = {
    encoding: 'LINEAR16',
    sampleRateHertz: 16000,
    languageCode: 'th-TH',
  };
  const request = {
    audio: audio,
    config: config,
  };
  // Detects speech in the audio file
  const [response] = await client.recognize(request);
  const transcription = response.results
    .map(result => result.alternatives[0].transcript)
    .join('\n');
  console.log('Result: ', JSON.stringify(response.results));
  return transcription;
}
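// Send reply messages back to the user through the LINE Messaging API reply endpoint.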
const reply = async (replyToken, payload) => {
  await axios({
    method: "post",
    url: `${LINE_MESSAGING_API}/message/reply`,
    headers: LINE_HEADER,
    data: JSON.stringify({
      replyToken: replyToken,
      messages: payload
    })
  });
};