Skip to content

Instantly share code, notes, and snippets.

@hwchase17
Created December 1, 2023 16:28
Show Gist options
  • Save hwchase17/d5dc031605b7ec383548c0c97520b35a to your computer and use it in GitHub Desktop.
from langchain.chains.openai_functions import create_structured_output_runnable
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.pydantic_v1 import BaseModel, Field
class Insight(BaseModel):
    """Structured-output schema: a single insight extracted by the model.

    Fix: the original body line was at column 0, which is an
    IndentationError; the field must be indented inside the class.
    """

    # The description is forwarded to the model as part of the
    # function-call schema it must conform to.
    insight: str = Field(description="""insight""")
# Chat model used by every call below.
chat_model = ChatOpenAI(model_name="gpt-4-1106-preview")

from langchain.prompts import MessagesPlaceholder

# Prompt layout: system message, then the running conversation history,
# then the user's question and a formatting reminder.
_prompt_messages = [
    (
        "system",
        """
long system message
""",
    ),
    # Accumulated conversation messages are injected here at invoke time.
    MessagesPlaceholder(variable_name="history"),
    ("human", "What's the step with the biggest impact? ```{input}```"),
    ("human", "Tip: Make sure to answer in the correct format"),
]
prompt = ChatPromptTemplate.from_messages(_prompt_messages)
# Runnable that parses the model's reply into an Insight instance.
runnable = create_structured_output_runnable(Insight, chat_model, prompt)

_question = "Step 1: sign up\n\nStep 2: Do all the work"

# First call: no prior conversation, but the "history" key must still be
# present in the input mapping.
response = runnable.invoke({"input": _question, "history": []})
response

# History can also be supplied explicitly as a list of message objects.
from langchain.schema.messages import AIMessage, HumanMessage

_manual_history = [HumanMessage(content="hi!"), AIMessage(content="hi!")]
response = runnable.invoke({"input": _question, "history": _manual_history})
# Alternatively, accumulate the conversation automatically with an
# in-memory message store.
from langchain.memory.chat_message_histories import ChatMessageHistory
from langchain.schema.runnable.history import RunnableWithMessageHistory

# The store must exist before the history wrapper is constructed.
memory = ChatMessageHistory()


def _serialize_output(result):
    # History entries must be strings or Message objects.  The runnable
    # yields a Pydantic model, so serialize it to JSON text for storage;
    # any string/message-producing transform would work here.
    return result.json()


def _lookup_history(session_id):
    # A single in-memory store: every session_id maps to the same history.
    return memory


chain_with_history = RunnableWithMessageHistory(
    runnable | _serialize_output,
    _lookup_history,
    input_messages_key="input",
    # Must match the MessagesPlaceholder variable_name used in the prompt.
    history_messages_key="history",
)

chain_with_history.invoke(
    {"input": "Step 1: sign up\n\nStep 2: Do all the work"},
    # The config must still carry a session_id, even though our store
    # ignores it.
    {"configurable": {"session_id": "123"}},
)
memory.messages
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment