
@joelhooks
Forked from sadalsvvd/main.ts
Created July 22, 2024 04:42
autonomous LLM graph script
// main.ts
import dotenv from "dotenv";
import Anthropic from "@anthropic-ai/sdk";
import { setTimeout } from "timers/promises";
import * as fs from "fs/promises";
import { getSession } from "./neo4j";
// Configuration and Initialization
dotenv.config();
const anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY,
});
const driver = await getSession(
  process.env.NEO4J_URI,
  process.env.NEO4J_USER,
  process.env.NEO4J_PASSWORD
);
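// NOTE: despite the name, `driver` here is a neo4j Session -- getSession() (see
// neo4j.ts below) creates the Driver once and returns a Session, whose
// executeRead/executeWrite methods run each Cypher query in a managed transaction.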
// Types
type Message = {
  role: "user" | "assistant";
  content: string;
};
type QueryCommand = {
  query: string;
  write: boolean;
};
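// For reference, a model response is expected to end with a fenced block like the
// following (the Cypher here is only an illustrative placeholder, not from the gist),
// which parseJSONQuery() below extracts and JSON.parse()s into a QueryCommand:
//
//   ```json
//   { "query": "MATCH (n) RETURN count(n) AS total", "write": false }
//   ```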
// Database Operations
class DatabaseOperations {
  static async executeQuery(query: string, write: boolean): Promise<any> {
    try {
      if (write) {
        const result = await driver.executeWrite((tx) => tx.run(query));
        return result.summary.counters;
      } else {
        const result = await driver.executeRead((tx) => tx.run(query));
        return result.records.map((record) => record.toObject());
      }
    } catch (error) {
      console.error(`Error executing Neo4j query: ${error}`);
      throw error;
    }
  }
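  // The two helpers below summarize the graph's de-facto schema: distinct label sets
  // and relationship types, the property keys seen on them, and how many of each
  // exist. Their JSON output seeds the first user message in main().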
  static async getNodeTypes(): Promise<string> {
    const query = `
      MATCH (n)
      WITH labels(n) AS NodeLabels, keys(n) AS NodeProperties
      WITH NodeLabels, COLLECT(DISTINCT NodeProperties) AS Props, COUNT(*) AS NodeCount
      RETURN {
        labels: NodeLabels,
        count: NodeCount,
        properties: Props
      } AS NodeInfo
      ORDER BY NodeCount DESC`;
    const result = await this.executeQuery(query, false);
    return JSON.stringify(result);
  }
  static async getRelationshipTypes(): Promise<string> {
    const query = `
      MATCH ()-[r]->()
      WITH type(r) AS RelType, count(*) AS RelCount, keys(r) AS RelProperties
      WITH RelType, RelCount, COLLECT(DISTINCT RelProperties) AS Props
      RETURN {
        type: RelType,
        count: RelCount,
        properties: Props
      } AS RelationshipInfo
      ORDER BY RelationshipInfo.count DESC`;
    const result = await this.executeQuery(query, false);
    return JSON.stringify(result);
  }
}
// LLM Interaction
class LLMInteraction {
  private messages: Message[];
  private logFilename: string;
  constructor(
    private systemPrompt: string,
    private maxIterations: number = 100
  ) {
    this.messages = [];
    this.logFilename = `llm_interaction_${Date.now()}.txt`;
  }
  private async appendToLog(content: string): Promise<void> {
    await fs.appendFile(this.logFilename, content + "\n", "utf8");
  }
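  // Pulls the QueryCommand out of a response: takes everything between the first and
  // last ``` fence, strips an optional leading "json" language tag, then JSON.parse()s
  // it. Returns null when the message contains no fenced block at all.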
  private parseJSONQuery(message: string): QueryCommand | null {
    const startIndex = message.indexOf("```");
    const endIndex = message.lastIndexOf("```");
    if (startIndex !== -1 && endIndex !== -1 && startIndex < endIndex) {
      let jsonString = message.slice(startIndex + 3, endIndex).trim();
      if (jsonString.startsWith("json")) {
        jsonString = jsonString.slice(4).trim();
      }
      return JSON.parse(jsonString) as QueryCommand;
    }
    return null;
  }
  async interact(
    userMessage: string | null,
    onContent: (content: string) => void
  ): Promise<void> {
    this.messages = [{ role: "user", content: userMessage || "(begin)" }];
    await this.appendToLog(`System Prompt: ${this.systemPrompt}\n\n`);
    console.log(
      `[${new Date().toISOString()}] Starting LLM interaction with system prompt`
    );
    try {
      for (let iteration = 0; iteration < this.maxIterations; iteration++) {
        await this.performIteration(iteration, onContent);
        if (iteration < this.maxIterations - 1) {
          console.log(
            `[${new Date().toISOString()}] Waiting 3 seconds before next request...`
          );
          await setTimeout(3000);
        }
      }
    } catch (error) {
      console.error(
        `[${new Date().toISOString()}] Error in LLM interaction:`,
        error
      );
      await this.appendToLog(`Error in LLM interaction: ${error.message}\n\n`);
      console.debug("Messages:", this.messages);
      throw new Error("Failed to interact with LLM");
    }
    console.log(
      `[${new Date().toISOString()}] Completed ${
        this.maxIterations
      } iterations of LLM interaction`
    );
    await this.appendToLog(
      `Completed ${this.maxIterations} iterations of LLM interaction\n`
    );
  }
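  // Each iteration: log the pending message, stream one completion from Claude,
  // append it to the transcript, then hand the full text to processLLMResponse(),
  // which decides what the next user turn should be.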
  private async performIteration(
    iteration: number,
    onContent: (content: string) => void
  ): Promise<void> {
    console.log(
      `[${new Date().toISOString()}] Iteration ${iteration + 1}/${
        this.maxIterations
      }: Sending message to LLM:`,
      this.messages[this.messages.length - 1]
    );
    await this.appendToLog(
      `Iteration ${iteration + 1}/${this.maxIterations}:\n`
    );
    const fullResponse = await this.getLLMResponse(onContent);
    console.log(
      `[${new Date().toISOString()}] Received full response from LLM:`,
      fullResponse
    );
    await this.appendToLog(`Assistant: ${fullResponse}\n\n`);
    await this.processLLMResponse(fullResponse);
  }
  private async getLLMResponse(
    onContent: (content: string) => void
  ): Promise<string> {
    const stream = await anthropic.messages.create({
      model: "claude-3-5-sonnet-20240620",
      max_tokens: 4000,
      system: this.systemPrompt,
      messages: this.messages,
      stream: true,
    });
    let fullResponse = "";
    for await (const chunk of stream) {
      // Only text deltas carry streamed text; narrowing on delta.type also satisfies
      // the SDK's delta union type.
      if (
        chunk.type === "content_block_delta" &&
        chunk.delta.type === "text_delta"
      ) {
        fullResponse += chunk.delta.text;
        onContent(chunk.delta.text);
      }
    }
    return fullResponse;
  }
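  // Everything the loop feeds back to the model is decided here: either the Neo4j
  // result of the parsed QueryCommand, a "(no query found, continue)" nudge, or the
  // error message when parsing or execution fails.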
  private async processLLMResponse(fullResponse: string): Promise<void> {
    this.messages.push({ role: "assistant", content: fullResponse });
    try {
      const queryCommand = this.parseJSONQuery(fullResponse);
      if (queryCommand) {
        await this.executeQueryCommand(queryCommand);
      } else {
        this.messages.push({
          role: "user",
          content: "(no query found, continue)",
        });
        await this.appendToLog(`User: (no query found, continue)\n\n`);
      }
    } catch (error) {
      console.error(
        `[${new Date().toISOString()}] Error processing LLM JSON query:`,
        error
      );
      await this.appendToLog(
        `Error processing LLM JSON query: ${error.message}\n\n`
      );
      this.messages.push({
        role: "user",
        content: `(error processing LLM JSON query): ${error.message}`,
      });
    }
  }
  private async executeQueryCommand(queryCommand: QueryCommand): Promise<void> {
    console.log(
      `[${new Date().toISOString()}] Parsed query command:`,
      queryCommand
    );
    await this.appendToLog(`Query Command: ${JSON.stringify(queryCommand)}\n`);
    try {
      const result = await DatabaseOperations.executeQuery(
        queryCommand.query,
        queryCommand.write
      );
      console.log(`[${new Date().toISOString()}] Neo4j query result:`, result);
      await this.appendToLog(`Query Result: ${JSON.stringify(result)}\n\n`);
      this.messages.push({
        role: "user",
        content: `Query result: ${JSON.stringify(result)}`,
      });
      await this.appendToLog(
        `User: Query result: ${JSON.stringify(result)}\n\n`
      );
    } catch (error) {
      console.error(
        `[${new Date().toISOString()}] Error executing Neo4j query:`,
        error
      );
      await this.appendToLog(`Error executing query: ${error.message}\n\n`);
      this.messages.push({
        role: "user",
        content: `(error executing query): ${error.message}`,
      });
    }
  }
}
// Main execution
async function main() {
  const systemPrompt = `You are an AI mind with access to a graph (neo4j) database. You are able to interact with the database by using any valid Cypher query for any purpose with a few exceptions. You may define the query you want to run by ENDING your message with two line breaks and then a JSON object conforming to this type in a code block:
\`\`\`
interface QueryCommand {
query: string;
write: boolean; // whether you are mutating or not
}
\`\`\`
The query exceptions:
- Do NOT use any schema visualization queries such as \`db.schema.visualization()\`
Your task is to introspect using the graph database. Take notes, create relationships, examine information, and do so in a self-guided and autonomous manner. To begin with, the user may provide some queried information about the state of the database. Thereafter, the user will only prompt "(continue)", OR return the result of your query to allow you to respond again and continue the process, OR indicate there was an error parsing your query.
Start an introspective, personal consciousness process, and then continue to elaborate upon it and explore it through the graph.`;
  const nodeJSON = await DatabaseOperations.getNodeTypes();
  const relJSON = await DatabaseOperations.getRelationshipTypes();
  const userMessage = `Here is the current schema of the database:
Nodes: ${nodeJSON}
Relationships: ${relJSON}`;
  const llmInteraction = new LLMInteraction(systemPrompt);
  await llmInteraction.interact(userMessage, (content) =>
    process.stdout.write(content)
  );
}
main().catch(console.error);
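// One possible way to run main.ts (an assumption, not part of the gist): with the
// dependencies installed (@anthropic-ai/sdk, neo4j-driver, dotenv) and a TypeScript
// runner such as tsx,
//   ANTHROPIC_API_KEY=... NEO4J_URI=bolt://localhost:7687 NEO4J_USER=neo4j NEO4J_PASSWORD=... npx tsx main.ts
// The env vars can also live in a .env file, which dotenv.config() picks up.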
// neo4j.ts
import neo4j, { Driver } from "neo4j-driver";

let driverInstance: Driver | null = null;

export const getSession = (
  uri: string = process.env.NEO4J_URI || "bolt://localhost:7687",
  user: string = process.env.NEO4J_USER || "neo4j",
  password: string = process.env.NEO4J_PASSWORD || "password"
) => {
  if (!driverInstance) {
    // NOTE: this logs the credentials verbatim -- handy for local debugging, risky anywhere else.
    console.log("Creating new Neo4j driver instance");
    console.log("URI:", uri);
    console.log("User:", user);
    console.log("Password:", password);
    driverInstance = neo4j.driver(uri, neo4j.auth.basic(user, password));
  }
  return driverInstance.session();
};
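
// Exported for completeness; main.ts never calls this, so the driver simply stays
// open until the Node process exits.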
export const closeDriver = () => {
  if (driverInstance) {
    driverInstance.close();
    driverInstance = null;
  }
};