# Gist by @RageshAntony (last active May 16, 2023)
# Conversational retrieval over a sales document with LangChain, Chroma, and memory
from colorama import Fore
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import TextLoader
from langchain.embeddings import HuggingFaceInstructEmbeddings
from langchain.memory import ConversationBufferMemory
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma
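
# Note: these import paths match the langchain release this gist was written
# against (mid-2023). In langchain >= 0.1 most of them moved, e.g. to
# langchain_community.vectorstores and langchain_openai.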
if __name__ == '__main__':
    print("status: loading sales document")
    loader = TextLoader("./docs/sales_doc.txt")
    pages = loader.load_and_split()
    # Split the document into 1000-character chunks with 200 characters of overlap
    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=1000,
        chunk_overlap=200,
        length_function=len,
    )
    gds_data_split = text_splitter.split_documents(pages)
    print(f"status: split into {len(gds_data_split)} chunks")
    # Define the embedding model (instructor-xl runs locally via HuggingFace)
    OPENAI_API_KEY = "sk-"  # placeholder: set your real OpenAI API key
    embeddings = HuggingFaceInstructEmbeddings(model_name="hkunlp/instructor-xl")

    # Index the chunks in a Chroma vector store
    support_data = gds_data_split
    support_store = Chroma.from_documents(
        support_data, embeddings, collection_name="support"
    )
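
    # Optional sketch (not in the original gist): Chroma can persist the index
    # to disk so it is not rebuilt on every run; "./chroma_db" is an assumed path.
    # support_store = Chroma.from_documents(
    #     support_data, embeddings,
    #     collection_name="support",
    #     persist_directory="./chroma_db",
    # )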
print("status: configure llm")
llm = ChatOpenAI(
model_name="gpt-3.5-turbo",
temperature=0,
openai_api_key=OPENAI_API_KEY,
max_tokens=1024,
)
print("status: confiure chain")
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
support_qa = ConversationalRetrievalChain.from_llm(
llm=llm,
retriever=support_store.as_retriever(),
verbose=False,
memory = memory
)
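
    # Optional sketch (assumption, not in the original gist): to see which chunks
    # were retrieved, pass return_source_documents=True. With memory attached the
    # chain then has multiple output keys, so the memory needs output_key="answer"
    # to know which one to store:
    # memory = ConversationBufferMemory(
    #     memory_key="chat_history", return_messages=True, output_key="answer"
    # )
    # support_qa = ConversationalRetrievalChain.from_llm(
    #     llm=llm,
    #     retriever=support_store.as_retriever(),
    #     memory=memory,
    #     return_source_documents=True,
    # )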
query = "Hi, my name is Ragesh?"
print(Fore.RED + query)
result = support_qa({"question": query})
print(Fore.GREEN + result['answer'])
query = "What is your name?"
print(Fore.RED + query)
result = support_qa({"question": query})
print(Fore.GREEN + result['answer']) # Answering as "Yes, my name is OpenAI language model, "
query = "Do you have Netflix clone?"
print(Fore.RED + query)
result = support_qa({"question": query})
print(Fore.GREEN + result['answer'])
query = "What is the price of that?"
print(Fore.RED + query)
result = support_qa({"question": query})
print(Fore.GREEN + result['answer']) # yeah. remembers the last product.
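
    # Optional sketch (assumption, not part of the original gist): the four
    # hard-coded queries above could be replaced with an interactive loop.
    # while True:
    #     query = input(Fore.RED + "you: ")
    #     if query.lower() in ("quit", "exit"):
    #         break
    #     result = support_qa({"question": query})
    #     print(Fore.GREEN + result['answer'])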