Skip to content

Instantly share code, notes, and snippets.

@fsndzomga
Created September 2, 2023 22:26
Show Gist options
  • Save fsndzomga/eb105e2e334630d6e04d23c7962e8282 to your computer and use it in GitHub Desktop.
This script creates a multilingual, context-aware question-answering system using LangChain and the OpenAI API, capable of responding to queries based on a given biography of Barack Obama.
from langchain.vectorstores import Chroma
from langchain.embeddings import OpenAIEmbeddings
from langchain.schema.runnable import RunnablePassthrough
from langchain.schema.output_parser import StrOutputParser
from langchain.prompts import ChatPromptTemplate
from langchain.chat_models import ChatOpenAI
from operator import itemgetter
from apikey import OPENAI_API_KEY
import os
# Expose the key from apikey.py through the environment so the OpenAI
# client libraries can pick it up automatically.
os.environ["OPENAI_API_KEY"] = OPENAI_API_KEY

# Chat model shared by every chain defined below (default OpenAI chat model).
model = ChatOpenAI()
# Short biography that serves as the only retrieval corpus for the QA chains.
biography = """
Barack Obama served as the 44th President of the United States from 2009 to 2017.
He was born in Honolulu, Hawaii, on August 4, 1961. Obama is a graduate of Columbia University
and Harvard Law School, where he served as president of the Harvard Law Review. He was a community
organizer in Chicago before earning his law degree and worked as a civil rights attorney and taught
constitutional law at the University of Chicago Law School between 1992 and 2004. He served three
terms representing the 13th District in the Illinois Senate from 1997 until 2004, when he ran for the
U.S. Senate. Obama received the Nobel Peace Prize in 2009.
"""

# Embed the biography into an in-memory Chroma index, then expose it via the
# retriever interface the chains below consume.
store = Chroma.from_texts([biography], embedding=OpenAIEmbeddings())
retriever = store.as_retriever()
# Prompt that instructs the model to answer strictly from the retrieved context.
qa_prompt = ChatPromptTemplate.from_template(
    """Answer the question based only on the following context:
{context}
Question: {question}
"""
)

# LCEL pipeline: the incoming question string is fed both to the retriever
# (filling {context}) and passed through untouched (filling {question});
# the model's chat reply is then reduced to a plain string.
qa_chain = (
    {"context": retriever, "question": RunnablePassthrough()}
    | qa_prompt
    | model
    | StrOutputParser()
)

# Ask a sample question to exercise the chain.
print(qa_chain.invoke("When was Barack Obama born?"))
# Variant prompt that additionally pins the language of the answer.
translation_template = """Answer the question based only on the following context:
{context}
Question: {question}
Answer in the following language: {language}
"""
translating_prompt = ChatPromptTemplate.from_template(translation_template)

# The input here is a dict, so each prompt variable is pulled out with
# itemgetter; only the question itself is routed through the retriever.
translating_chain = (
    {
        "context": itemgetter("question") | retriever,
        "question": itemgetter("question"),
        "language": itemgetter("language"),
    }
    | translating_prompt
    | model
    | StrOutputParser()
)

# Ask the same question, but request the answer in Italian.
print(translating_chain.invoke({"question": "When was Barack Obama born?", "language": "italian"}))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment