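// Minimal OpenRouter tool-calling demo for Deno: the model can search
// Project Gutenberg via the public Gutendex API and use the results in its
// answer. Expects an OPENROUTER_API_KEY entry in a local .env file (loaded below).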
import "jsr:@std/dotenv/load";
const openrouterApiKey = Deno.env.get("OPENROUTER_API_KEY");
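// Tool implementation: query the Gutendex API for Project Gutenberg books
// matching the given search terms, returning a trimmed-down result list.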
async function searchGutenbergBooks(searchTerms: string[]): Promise<any[]> {
  const searchQuery = encodeURIComponent(searchTerms.join(" "));
  const url = "https://gutendex.com/books";
  const response = await fetch(`${url}?search=${searchQuery}`);
  const data = await response.json();

  return data.results.map((book: any) => ({
    id: book.id,
    title: book.title,
    authors: book.authors,
  }));
}
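// JSON schema describing the tool in the OpenAI-style function-calling format
// that OpenRouter accepts. The model sees this schema and decides when to
// call searchGutenbergBooks.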
const tools = [
  {
    type: "function",
    function: {
      name: "searchGutenbergBooks",
      description:
        "Search for books in the Project Gutenberg library based on specified search terms",
      parameters: {
        type: "object",
        properties: {
          search_terms: {
            type: "array",
            items: {
              type: "string",
            },
            description:
              "List of search terms to find books in the Gutenberg library (e.g. ['dickens', 'great'] to search for books by Dickens with 'great' in the title)",
          },
        },
        required: ["search_terms"],
      },
    },
  },
];
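// Maps tool names (as the model emits them) to local implementations.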
const TOOL_MAPPING: any = {
  searchGutenbergBooks,
};
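// Shared conversation history; starts with the system prompt and grows as the
// user, the model, and tool results add messages.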
const messages: any = [
  { role: "system", content: "You are a helpful assistant." },
];
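// Send the current conversation (plus the tool schema) to OpenRouter and
// append the model's reply to the history.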
async function callLLM(messages: any[]): Promise<any> {
  const response = await fetch(
    "https://openrouter.ai/api/v1/chat/completions",
    {
      method: "POST",
      headers: {
        Authorization: `Bearer ${openrouterApiKey}`,
        "Content-Type": "application/json",
      },
      body: JSON.stringify({
        model: "google/gemini-2.0-flash-001",
        tools,
        messages,
      }),
    },
  );

  if (!response.ok) {
    throw new Error(`Response status: ${response.status}`);
  }

  const data = await response.json();
  messages.push(data.choices[0].message);
  return data;
}
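// Execute the tool call requested by the model and wrap the result as a
// "tool" message the model can read on the next turn.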
async function getToolResponse(responseJson: any): Promise<any> {
  // Only the first tool call in the response is executed.
  const toolCall = responseJson.choices[0].message.tool_calls[0];
  const toolName = toolCall.function.name;
  const args = JSON.parse(toolCall.function.arguments);
  const toolArgs = Object.values(args);

  const toolResponse = await TOOL_MAPPING[toolName](...toolArgs);

  return {
    role: "tool",
    tool_call_id: toolCall.id, // the API expects snake_case here
    name: toolName,
    content: JSON.stringify(toolResponse),
  };
}
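// Agent loop: prompt the user, then alternate between model calls and tool
// execution until the model answers without requesting a tool.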
async function main() {
  const readPrompt = prompt("What can I help you with today?") ?? "";
  messages.push({
    role: "user",
    content: readPrompt,
  });
  console.log("searching...");

  while (true) {
    const json = await callLLM(messages);
    if (json.choices[0].message.tool_calls) {
      messages.push(await getToolResponse(json));
    } else {
      break;
    }
  }

  console.log(messages[messages.length - 1].content);
}

main();
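// To run (assuming this file is saved as main.ts and a .env file next to it
// provides OPENROUTER_API_KEY):
//   deno run --allow-net --allow-env --allow-read main.ts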