Skip to content

Instantly share code, notes, and snippets.

@hightemp
Created December 22, 2023 08:21
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save hightemp/46fcb48e063d76206ec0cfde2837fd4b to your computer and use it in GitHub Desktop.
How To Chat With A Github Repository Using Llama-index
# Build (or reload) a vector index over a GitHub repository with llama-index,
# then expose a query engine for interactive Q&A.
import os

import openai
from llama_index import StorageContext, VectorStoreIndex, load_index_from_storage
from llama_index.readers import GithubRepositoryReader

# Local module holding the secrets: an OpenAI API key and a GitHub
# personal-access token. NOTE(review): this shadows the stdlib-adjacent
# name `tokens`; kept as-is because callers import it this way.
from tokens import githubToken, apikey

openai.api_key = apikey
# GithubRepositoryReader reads the token from the environment.
os.environ["GITHUB_TOKEN"] = githubToken

# Persisted-index location; reuse it when present so we only crawl the
# repository once.
folder_path = "./storage"
# isdir() already implies existence, so a separate exists() check is redundant.
folder_exists = os.path.isdir(folder_path)

if folder_exists:
    # Rebuild the storage context from disk and load the saved index.
    storage_context = StorageContext.from_defaults(persist_dir=folder_path)
    index = load_index_from_storage(storage_context)
else:
    # Crawl the llama-index repository itself, skipping non-source folders.
    reader = GithubRepositoryReader(
        "jerryjliu",
        "llama_index",
        ignore_directories=[
            ".github", ".vscode", "benchmarks", "docs",
            "examples", "experimental", "scripts", "tests",
        ],
    )
    # Load documents from the main branch (network call to GitHub).
    branch_documents = reader.load_data(branch="main")
    # Build the vector index and persist it for the next run; pass the
    # directory explicitly so save and load always agree on the path.
    index = VectorStoreIndex.from_documents(branch_documents)
    index.storage_context.persist(persist_dir=folder_path)

query_engine = index.as_query_engine()
# Interactive chat loop. The history dict must be created ONCE, outside the
# loop — the original recreated it every iteration, so the "conversation
# history" passed in the prompt was always empty.
conversation = {}  # question -> engine response, accumulated across turns
while True:
    question = input("\n Write your question or enter 'quit' to quit. \n\n")
    # Check the sentinel before recording, so 'quit' never enters the history.
    if question == 'quit':
        break
    # Include all prior Q&A pairs as context for the new question.
    prompt = f"Respond to this question: {question} given the conversation history: {conversation} \n"
    response = query_engine.query(prompt)
    conversation[question] = response
    print(response)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment