Created
December 11, 2023 13:51
-
-
Save betiol/b9204175d3784892c37ae27b04c81890 to your computer and use it in GitHub Desktop.
ChatGPT with your data
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#pip install langchain openai chromadb tiktoken unstructured beautifulsoup4
#how to run:
#export OPENAI_API_KEY="sk------" && python3 chatgpt.py https://yoursite.com
import os
import sys

import requests
from bs4 import BeautifulSoup
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import TextLoader
from langchain.embeddings import OpenAIEmbeddings
from langchain.indexes import VectorstoreIndexCreator
def extract_data_from_url(url):
    """Fetch *url*, extract its visible text, and save it for indexing.

    The text is written to ``data/data_from_url.txt``; the ``data``
    directory is created if it does not already exist.

    Args:
        url: Fully qualified URL of the page to scrape.

    Raises:
        requests.HTTPError: If the server responds with an error status.
        requests.RequestException: On connection failure or timeout.
    """
    # Bounded timeout so the script cannot hang forever on a dead host.
    response = requests.get(url, timeout=30)
    # Fail loudly on non-2xx instead of silently writing nothing
    # (the original left no data file, making the loader crash later
    # with a confusing error).
    response.raise_for_status()
    soup = BeautifulSoup(response.content, "html.parser")
    # Ensure the target directory exists; open() would otherwise raise
    # FileNotFoundError on a fresh checkout.
    os.makedirs("data", exist_ok=True)
    with open("data/data_from_url.txt", "w", encoding="utf-8") as file:
        file.write(soup.get_text())
if len(sys.argv) > 1:
    url = sys.argv[1]
    extract_data_from_url(url)

    # Build a vector index over the scraped page text.
    loader = TextLoader("data/data_from_url.txt")
    index_creator = VectorstoreIndexCreator()
    index = index_creator.from_loaders([loader])

    # Conversational chain: retrieve the single most relevant chunk (k=1)
    # and feed it to the chat model along with the running history.
    chain = ConversationalRetrievalChain.from_llm(
        llm=ChatOpenAI(model="gpt-3.5-turbo"),
        retriever=index.vectorstore.as_retriever(search_kwargs={"k": 1}),
    )

    chat_history = []
    while True:
        try:
            query = input("Prompt: ")
        except (EOFError, KeyboardInterrupt):
            # Ctrl-D / Ctrl-C should end the session cleanly,
            # not dump a traceback.
            print()
            break
        if query.lower() in ("quit", "q", "exit"):
            break
        result = chain({"question": query, "chat_history": chat_history})
        print(result["answer"])
        chat_history.append((query, result["answer"]))
else:
    # No URL given: print usage and exit non-zero so shell callers
    # can detect the misuse (previously this exited with status 0).
    print("Please, add a url as a parameter")
    sys.exit(1)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment