Skip to content

Instantly share code, notes, and snippets.

@Laurian
Created March 23, 2024 06:09
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save Laurian/cd4200896d90a0dc3fe1232316afed67 to your computer and use it in GitHub Desktop.
Save Laurian/cd4200896d90a0dc3fe1232316afed67 to your computer and use it in GitHub Desktop.
Example: persisting LangChain chat history with PostgresChatMessageHistory and RunnableWithMessageHistory
from dotenv import dotenv_values
import os
from typing import List, Tuple
from langchain.callbacks.tracers import ConsoleCallbackHandler
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_core.runnables import (
RunnableParallel,
)
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_openai import ChatOpenAI
from langchain.memory import PostgresChatMessageHistory
# Layered configuration: later files override earlier ones, and real
# environment variables override everything loaded from dotenv files.
config = {}
for _env_file in (".env.shared", ".env.secret", ".env.local", ".env.production"):
    config.update(dotenv_values(_env_file))
config.update(os.environ)
# LangChain and OpenAI read these from os.environ, so mirror them there.
# Fix: the original did `os.environ[k] = config.get(k)`, which raises
# TypeError (value must be str) whenever a key is absent from every
# dotenv file and the environment. Skip missing keys instead.
for _key in (
    "LANGCHAIN_TRACING_V2",
    "LANGCHAIN_ENDPOINT",
    "LANGCHAIN_API_KEY",
    "LANGCHAIN_PROJECT",
    "OPENAI_API_KEY",
):
    _value = config.get(_key)
    if _value is not None:
        os.environ[_key] = _value
# Answer synthesis prompt: system instruction, then the prior conversation,
# then the user's new question.
template = """Answer the user's question below. Be polite and helpful:"""

_prompt_messages = [
    ("system", template),
    MessagesPlaceholder(variable_name="chat_history"),
    ("user", "{question}"),
]
ANSWER_PROMPT = ChatPromptTemplate.from_messages(_prompt_messages)
# User input
class ChatHistory(BaseModel):
    """Input schema for the chain: prior turns, the new question, and a session id."""
    # Prior conversation as (role-or-human, message) string pairs; the
    # "chat" widget hint presumably targets the LangServe playground UI —
    # TODO confirm against the serving layer.
    chat_history: List[Tuple[str, str]] = Field(..., extra={"widget": {"type": "chat"}})
    # The user's new question to answer.
    question: str
    # Key used to look up / persist this conversation's history in Postgres.
    session_id: str
def _extract_question(payload):
    # Pull the question field out of the incoming request dict.
    return payload["question"]


def _extract_chat_history(payload):
    # Pull the prior-turns field out of the incoming request dict.
    return payload["chat_history"]


# Fan the request out into the two inputs the prompt needs, and attach the
# ChatHistory schema so callers get typed validation.
_inputs = RunnableParallel(
    {
        "question": _extract_question,
        "chat_history": _extract_chat_history,
    },
).with_types(input_type=ChatHistory)
def _load_session_history(session_id):
    # One Postgres-backed message history per chat session; the connection
    # string comes from the layered config (may be None if unconfigured).
    return PostgresChatMessageHistory(
        connection_string=config.get("POSTGRES_CONNECTION_STRING"),
        session_id=session_id,
    )


# Core pipeline: extract inputs -> prompt -> model -> plain-string output.
_answer_chain = _inputs | ANSWER_PROMPT | ChatOpenAI() | StrOutputParser()

# Wrap the pipeline so history is loaded from / written back to Postgres,
# and log every run to the console for debugging.
chain = RunnableWithMessageHistory(
    _answer_chain,
    _load_session_history,
    input_messages_key="question",
    history_messages_key="chat_history",
).with_config(callbacks=[ConsoleCallbackHandler()])
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment