LangChain ReAct agent example
""" | |
ReAct agent example | |
Adapted from: https://langchain-ai.github.io/langgraph/how-tos/tool-calling/ | |
""" | |
from langchain_core.tools import tool | |
from langgraph.prebuilt import ToolNode | |
from langgraph.graph import StateGraph, MessagesState | |
from langgraph.prebuilt import ToolNode | |
from langgraph.graph import StateGraph, MessagesState, START, END | |
# from langchain_anthropic import ChatAnthropic | |
from codeassistant.llm_forge import load_llm_forge | |
@tool | |
def get_weather(location: str): | |
"""Call to get the current weather.""" | |
if location.lower() in ["sf", "san francisco"]: | |
return "It's 60 degrees and foggy." | |
else: | |
return "It's 85 degrees and sunny." | |
tools = [get_weather] | |
tool_node = ToolNode(tools) | |
# model_with_tools = ChatAnthropic( | |
# model="claude-3-haiku-20240307", temperature=0 | |
# ).bind_tools(tools) | |
llm = load_llm_forge() | |
model_with_tools = llm.bind_tools(tools) | |
def should_continue(state: MessagesState): | |
messages = state["messages"] | |
last_message = messages[-1] | |
if last_message.tool_calls: | |
return "tools" | |
return END | |
def call_model(state: MessagesState): | |
messages = state["messages"] | |
response = model_with_tools.invoke(messages) | |
return {"messages": [response]} | |
workflow = StateGraph(MessagesState) | |
# Define the two nodes we will cycle between | |
workflow.add_node("agent", call_model) | |
workflow.add_node("tools", tool_node) | |
workflow.add_edge(START, "agent") | |
workflow.add_conditional_edges("agent", should_continue, ["tools", END]) | |
workflow.add_edge("tools", "agent") | |
graph = workflow.compile() | |
response = graph.invoke({"messages": [("human", "what's the weather in sf?")]}) | |
for m in response["messages"]: | |
m.pretty_print() |
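As an optional variant (not part of the original gist), the run can also be streamed so each intermediate state -- model response, tool result, final answer -- is printed as it arrives. The sketch below assumes the `graph` object compiled above and LangGraph's `stream` method with `stream_mode="values"`.

# Optional sketch (assumption, not in the original gist): stream the run step by
# step instead of invoking it once, printing the latest message of each snapshot.
for chunk in graph.stream(
    {"messages": [("human", "what's the weather in sf?")]},
    stream_mode="values",
):
    chunk["messages"][-1].pretty_print()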