Created
December 23, 2023 20:58
-
-
Save martingaido/6a3e154622ecb03347593b7001c63653 to your computer and use it in GitHub Desktop.
RAG Ollama
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from langchain.llms import Ollama
from langchain.document_loaders import WebBaseLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings import GPT4AllEmbeddings
from langchain.vectorstores import Chroma
from langchain.chains import RetrievalQA

import gradio as gr

# Cache of the most recently processed URL, its loaded documents, and the
# vector store built from them, so repeat questions about the same page
# skip the expensive load/split/embed pipeline.
prev_url = None
prev_data = None
prev_vectorstore = None


def process_url_and_question(url: str, question: str) -> str:
    """Answer *question* about the web page at *url* using RAG over a local Ollama.

    Loads the page, splits it into chunks, embeds the chunks into a Chroma
    vector store, and runs a RetrievalQA chain against a llama2 model served
    at http://localhost:11434. Results for the last URL are cached in module
    globals so only a changed URL triggers re-loading and re-embedding.

    Args:
        url: Web page to load as the retrieval corpus.
        question: Natural-language question to answer from that page.

    Returns:
        The model's answer text.
    """
    global prev_url, prev_data, prev_vectorstore

    # Rebuild the corpus and vector store only when the URL changes;
    # the split/embed steps belong inside this branch or the cache is useless.
    if url != prev_url:
        loader = WebBaseLoader(url)
        prev_data = loader.load()
        prev_url = url
        text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=20)
        all_splits = text_splitter.split_documents(prev_data)
        prev_vectorstore = Chroma.from_documents(
            documents=all_splits, embedding=GPT4AllEmbeddings()
        )

    # assumes an Ollama server with the llama2 model is running locally — TODO confirm
    ollama = Ollama(base_url="http://localhost:11434", model="llama2")
    qachain = RetrievalQA.from_chain_type(ollama, retriever=prev_vectorstore.as_retriever())
    result = qachain({"query": question})

    # RetrievalQA returns a dict like {"query": ..., "result": ...};
    # a Gradio "text" output needs the answer string, not the raw dict.
    return result.get("result", str(result))
# Wire the RAG pipeline into a minimal web UI:
# two free-text inputs (page URL, question) mapped to one text output.
iface = gr.Interface(
    fn=process_url_and_question,
    inputs=["text", "text"],
    outputs="text",
)
iface.launch()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment