Skip to content

Instantly share code, notes, and snippets.

@csiebler
Last active September 19, 2023 04:41
Show Gist options
  • Star 2 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save csiebler/32f371470c4e717db84a61874e951fa4 to your computer and use it in GitHub Desktop.
Using Llama-index with gpt-35-turbo for Q&A with Azure OpenAI Service
"""Q&A over local documents with llama_index + Azure OpenAI Service.

Builds a vector index from the files under data/qna/ using an Azure-hosted
gpt-35-turbo deployment for completions and text-embedding-ada-002 for
embeddings, then answers one hard-coded question and prints the sources.
"""
import os

import openai
from dotenv import load_dotenv
from langchain.embeddings import OpenAIEmbeddings
from llama_index import (
    GPTVectorStoreIndex,
    LangchainEmbedding,
    PromptHelper,
    ServiceContext,
    SimpleDirectoryReader,
)
from llama_index.llms import AzureOpenAI

# Load environment variables (set OPENAI_API_KEY and OPENAI_API_BASE in .env)
load_dotenv()

# Point the openai module at the Azure OpenAI Service endpoint.
openai.api_type = "azure"
openai.api_base = os.getenv("OPENAI_API_BASE")
openai.api_key = os.getenv("OPENAI_API_KEY")
openai.api_version = os.getenv("OPENAI_API_VERSION")
# openai.log = "debug"  # uncomment to trace raw API requests/responses

# Initialize LLM and embeddings model.
# `model` is the underlying model name (e.g. gpt-35-turbo); `engine` /
# `deployment_id` is your custom Azure deployment name for that model.
llm = AzureOpenAI(engine="gpt-35-turbo", model="gpt-35-turbo", temperature=0.0)
embeddings = LangchainEmbedding(
    OpenAIEmbeddings(deployment_id="text-embedding-ada-002", chunk_size=1)
)

# Prompt helper: keeps prompt + retrieved context within the model's
# token budget and controls chunking of the source documents.
prompt_helper = PromptHelper(
    context_window=3000,
    num_output=500,
    chunk_overlap_ratio=0.1,
    chunk_size_limit=1000,
)

# Bundle LLM, embeddings, and prompt helper into one service context.
# The index (and any query engine derived from it) picks everything up
# from here, so none of these need to be passed again downstream.
service_context = ServiceContext.from_defaults(
    llm=llm, embed_model=embeddings, prompt_helper=prompt_helper
)

# Load documents and build the vector index.
documents = SimpleDirectoryReader("data/qna/").load_data()
index = GPTVectorStoreIndex.from_documents(documents, service_context=service_context)

# The query engine inherits the index's service context automatically.
query_engine = index.as_query_engine(verbose=True)

query = "What is azure openai service? give me back a bullet point list"
answer = query_engine.query(query)
print(f"Query: {query}")
print(f"Answer: {answer}")
print(f"Sources: {answer.get_formatted_sources()}")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment