Skip to content

Instantly share code, notes, and snippets.

@JosephCastro
Last active July 4, 2024 02:31
Show Gist options
  • Save JosephCastro/8942fdf3bb0a4baa8a85a2f58eea27dd to your computer and use it in GitHub Desktop.
from langchain.memory.chat_message_histories.redis import RedisChatMessageHistory
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_core.output_parsers import StrOutputParser
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.prompts import PromptTemplate
from langchain.chains import LLMChain
from google.colab import userdata
import os
# --- Environment configuration (Google Colab only) ---
# Secrets are pulled from Colab's userdata store and exposed as the
# environment variables the LangChain / OpenAI clients read.
langchain_api_key = userdata.get('LANGCHAIN_API_KEY')
os.environ["LANGCHAIN_API_KEY"] = langchain_api_key
# Enables LangSmith tracing for every chain run.
os.environ["LANGCHAIN_TRACING_V2"] = "true"
os.environ['OPENAI_API_KEY'] = userdata.get('OPENAI_API_KEY')
# Connection URL of the Redis instance backing the chat histories.
redis_url = userdata.get('REDIS_URL')
class MemoryManagement:
    """Wraps a Redis-backed chat message history for a single session."""

    def __init__(self, session_id):
        self.session_id = session_id
        # Lazily created RedisChatMessageHistory (see get_memory).
        self.history = None

    def get_id(self):
        """Return the session id this memory is bound to."""
        return self.session_id

    def get_memory(self, session_id=None):
        """Return the (lazily created, cached) chat history.

        ``session_id`` defaults to the id given at construction time. It is
        accepted — and now actually honored instead of being silently
        ignored — so this method can be passed directly as the
        ``get_session_history`` callable of ``RunnableWithMessageHistory``.
        NOTE: the history is cached after the first call, so later calls
        with a different id still return the first session's history.
        """
        if not self.history:
            sid = session_id if session_id is not None else self.session_id
            self.history = RedisChatMessageHistory(session_id=sid, url=redis_url)
        return self.history

    def add_message(self, role, message):
        """Append ``message`` to the session history under ``role``.

        ``role`` is 'user', 'system', or anything else (treated as 'ai').
        """
        self.get_memory(self.session_id)
        if role == 'user':
            self.history.add_user_message(message)
        elif role == 'system':
            # Chat history classes expose no add_system_message helper
            # (the original call would raise AttributeError); use the
            # generic add_message with a SystemMessage instead.
            self.history.add_message(SystemMessage(content=message))
        else:  # ai
            self.history.add_ai_message(message)
class AiContextResponse:
    """Answers a question with an LLM, conditioning on the chat history."""

    # One chat model shared by every instance of this class.
    model = ChatOpenAI(model="gpt-4o")

    def __init__(self):
        # Prompt (in Spanish): answer the question given the running
        # conversation history.
        self.prompt_template = PromptTemplate(
            template="Como un asistente de IA, responderás preguntas de acuerdo con el siguiente historial de conversación: \n'''{chat_history}'''. \nPregunta: '''{input}'''",
            input_variables=['chat_history', 'input'],
        )

    def response(self, message, session_id, memory_call):
        """Run prompt -> model -> string parser with message history.

        ``memory_call`` is a callable that returns the chat history for a
        session id; ``session_id`` selects which conversation to use.
        """
        base_chain = self.prompt_template | AiContextResponse.model | StrOutputParser()
        history_chain = RunnableWithMessageHistory(
            runnable=base_chain,
            get_session_history=memory_call,
            input_messages_key='input',
            history_messages_key='chat_history',
        )
        config = {'configurable': {'session_id': session_id}}
        return history_chain.invoke({'input': message}, config)
class ChatManagement:
    """Console chat loop: 'login ID' to start, chat with history, 'logout' to end."""

    def __init__(self):
        self.memory = None      # MemoryManagement for the active session
        self.status = 'logout'  # 'login' once a session id has been given

    def __login_command__(self, message):
        """Handle 'login <ID>'. Return True when a session was started."""
        token = message.split(' ')
        # Guard against a bare 'login' with no id: the original indexed
        # token[1] unconditionally (IndexError), and its `is not None`
        # check was dead code — str.split never yields None.
        if token[0] == 'login' and len(token) > 1 and token[1]:
            self.status = 'login'
            self.memory = MemoryManagement(token[1])
            return True
        return False

    def get_history(self, token):
        """Return a MemoryManagement bound to ``token`` (for inspection)."""
        return MemoryManagement(token)

    def __logout_command__(self, message):
        """Handle 'logout'. Return True when the session was closed."""
        token = message.split(' ')
        if token[0] == 'logout':
            self.status = 'logout'
            self.memory = None
            return True
        return False

    def process_message(self, message):
        """Process one user input; return the answer, or a falsy value to stop.

        Before login the only accepted input is 'login <ID>' (returns
        True/False; False makes chat() exit). 'logout' ends the session.
        """
        if self.status != 'login':
            return self.__login_command__(message)
        if self.__logout_command__(message):
            return False
        # RunnableWithMessageHistory already persists both the user's input
        # and the model's answer to the Redis history, so the messages are
        # NOT added manually here — doing both stored every exchange twice
        # (the duplication noted in the original TODO).
        return AiContextResponse().response(
            message, self.memory.get_id(), self.memory.get_memory
        )

    def chat(self):
        """Interactive REPL; ends when process_message returns a falsy value."""
        while True:
            question = input("Pregunta: ")
            answer = self.process_message(question)
            if not answer:
                break
            print(f'Respuesta: {answer}')
# --- Example usage -----------------------------------------------------------
# To chat, type: login ID
# where ID is any word that serves as an identifier, e.g. JUAN
# To finish, type: logout
# (The original `import chat.py` was removed: it tries to import a
# nonexistent `py` submodule of a `chat` package and raises
# ModuleNotFoundError. A __main__ guard keeps the interactive loop from
# running when this module is imported.)
if __name__ == '__main__':
    bot = ChatManagement()
    bot.chat()
    # To inspect a conversation, pass the ID whose history you want to see.
    mm = bot.get_history('---')
    print(mm.get_memory())
    # TODO: the history repeats itself — the same message is written twice.
langchain
langchain-openai
langchain-community
langchain-core
faiss-gpu
redis
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment