Skip to content

Instantly share code, notes, and snippets.

@aeither
Created May 2, 2023 08:16
Show Gist options
  • Save aeither/74ecac1533beceb9abb7687435df5d59 to your computer and use it in GitHub Desktop.
/**
 * Index one chunk of text in Metal so it can be searched later.
 *
 * @param {string} text - Chunk of transcript text to embed.
 * @param {string} videoId - YouTube video id, stored as metadata for filtering.
 * @returns {Promise<object>} Parsed JSON response from the Metal index API.
 * @throws {Error} If the request fails or the API responds with a non-2xx status.
 */
const saveToMetal = async (text, videoId) => {
  const body = {
    index: INDEX_ID,
    text,
    metadata: { videoId },
  };
  const options = {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'x-metal-api-key': API_KEY,
      'x-metal-client-id': CLIENT_ID,
    },
    body: JSON.stringify(body),
  };
  const response = await fetch('https://api.getmetal.io/v1/index', options);
  // Fail loudly: the previous .catch((err) => console.error(err)) swallowed
  // network errors and returned undefined, hiding failed indexing from callers.
  if (!response.ok) {
    throw new Error(`Metal index request failed: ${response.status} ${response.statusText}`);
  }
  return response.json();
};
/**
* Fetch transcript and save the embeddings to db
*/
/**
 * Fetch a YouTube transcript, split it into ~1000-character chunks, and
 * index each chunk in Metal.
 *
 * NOTE(review): reads `videoId` from the enclosing scope, not a parameter —
 * confirm it is defined wherever this snippet is embedded.
 *
 * @returns {Promise<void>}
 * @throws {Error} If the transcript cannot be fetched (wraps the original error).
 */
const storeVideo = async () => {
  let transcriptArray;
  try {
    transcriptArray = await YoutubeTranscript.fetchTranscript(videoId);
  } catch (error) {
    // Preserve the original error via `cause` instead of discarding its stack.
    throw new Error(error.message, { cause: error });
  }
  const transcript = reduceTranscript(transcriptArray);
  const textSplitter = new RecursiveCharacterTextSplitter({ chunkSize: 1000 });
  const docs = await textSplitter.createDocuments([transcript]);
  // Save chunks to Metal one at a time (sequential on purpose: keeps
  // request ordering and avoids bursting the API).
  for (const doc of docs) {
    await saveToMetal(doc.pageContent, videoId);
  }
};
/**
 * Search the Metal index for chunks semantically similar to `query`,
 * restricted to a single video via a metadata filter.
 *
 * @param {string} query - Natural-language search text.
 * @param {string} videoId - Only return chunks indexed with this video id.
 * @returns {Promise<object>} Parsed JSON search response (hits under `.data`).
 * @throws {Error} If the request fails or the API responds with a non-2xx status.
 */
const getEmbeddings = async (query, videoId) => {
  // The original `try { ... } catch (error) { throw error }` was a no-op
  // rethrow; let errors propagate naturally and add a real status check.
  const response = await fetch('https://api.getmetal.io/v1/search', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'x-metal-api-key': API_KEY,
      'x-metal-client-id': CLIENT_ID,
    },
    body: JSON.stringify({
      index: INDEX_ID,
      text: query,
      filters: [
        {
          field: 'videoId',
          value: videoId,
        },
      ],
    }),
  });
  if (!response.ok) {
    throw new Error(`Metal search request failed: ${response.status} ${response.statusText}`);
  }
  return response.json();
};
/**
 * Run a Metal search for `query` within one video and wrap each hit
 * in a LangChain Document.
 *
 * @param {string} query - Search text.
 * @param {string} videoId - Restrict results to this video.
 * @returns {Promise<Document[]>} One Document per search hit.
 */
async function getDocuments(query, videoId) {
  const { data } = await getEmbeddings(query, videoId);
  const documents = [];
  for (const { text, metadata } of data) {
    documents.push(new Document({ pageContent: text, metadata }));
  }
  return documents;
}
/**
* QA Youtube Video
*/
/**
 * QA Youtube Video: answer `message` using transcript chunks retrieved
 * from Metal, via a stuff-documents QA chain.
 *
 * NOTE(review): reads `videoId` from the enclosing scope — confirm it is
 * defined wherever this snippet is embedded.
 *
 * @param {string} message - User question about the video.
 * @returns {Promise<object|undefined>} The chain's answer, or undefined on error.
 */
const askVideo = async (message) => {
  // Bug fix: the helper defined in this file is `getDocuments`;
  // `getRelevantDocuments` does not exist here and threw a ReferenceError.
  const docs = await getDocuments(message, videoId);
  const llm = new ChatOpenAI({
    temperature: 0.7,
  });
  const chain = loadQAStuffChain(llm);
  try {
    const answer = await chain.call({
      input_documents: docs,
      question: message,
    });
    console.log(answer);
    // Also return the answer so callers can use it, not just read the log.
    return answer;
  } catch (e) {
    // Keep the original best-effort behavior: log and resolve undefined.
    console.error(e);
    return undefined;
  }
};
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment