Created
December 29, 2023 17:48
-
-
Save pablanco/56cef3e2aa42b1f7bf8ccedfc40202ed to your computer and use it in GitHub Desktop.
index-gemini.ts
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import { Request, Response } from "express"; | |
import { GoogleGenerativeAI } from '@google/generative-ai'; | |
import dotenv from "dotenv"; | |
dotenv.config(); | |
// GoogleGenerativeAI required config | |
const configuration = new GoogleGenerativeAI(process.env.API_KEY); | |
// Model initialization | |
const modelId = "gemini-pro"; | |
const model = configuration.getGenerativeModel({ model: modelId }); | |
//These arrays are to maintain the history of the conversation | |
const conversationContext = []; | |
const currentMessages = []; | |
// Controller function to handle chat conversation | |
export const generateResponse = async (req: Request, res: Response) => { | |
try { | |
const { prompt } = req.body; | |
// Restore the previous context | |
for (const [inputText, responseText] of conversationContext) { | |
currentMessages.push({ role: "user", parts: inputText }); | |
currentMessages.push({ role: "model", parts: responseText }); | |
} | |
const chat = model.startChat({ | |
history: currentMessages, | |
generationConfig: { | |
maxOutputTokens: 100, | |
}, | |
}); | |
const result = await chat.sendMessage(prompt); | |
const response = await result.response; | |
const responseText = response.text(); | |
// Stores the conversation | |
conversationContext.push([prompt, responseText]); | |
res.send({ response: responseText }); | |
} catch (err) { | |
console.error(err); | |
res.status(500).json({ message: "Internal server error" }); | |
} | |
}; |
Sign up for free to join this conversation on GitHub.
Already have an account?
Sign in to comment