Skip to content

Instantly share code, notes, and snippets.

@karthikscale3
Created April 10, 2024 17:57
Show Gist options
  • Save karthikscale3/c13a5b7c53820ee297badd3acaa26d58 to your computer and use it in GitHub Desktop.
Langtrace and Signoz setup - FastAPI/LangChain/RAG
from fastapi import FastAPI
from langchain_community.vectorstores.faiss import FAISS
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts.chat import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_openai import ChatOpenAI, OpenAIEmbeddings
from openai import OpenAI
from langtrace_python_sdk import langtrace
# Initialize Langtrace before creating the app so instrumentation is active
# for all subsequent LLM/vector-store calls. Traces are kept local (not sent
# to Langtrace cloud) and debug output goes to the console.
langtrace.init(write_to_langtrace_cloud=False, debug_log_to_console=True)
app = FastAPI()
# NOTE(review): `client` is not used by the visible endpoint below — presumably
# kept for direct OpenAI calls elsewhere; confirm before removing.
client = OpenAI()
@app.get("/")
def root():
    """Answer a fixed demo question via a small RAG chain.

    Builds a one-document FAISS vector store, retrieves context for the
    question, and runs it through an LCEL pipeline
    (retriever -> prompt -> chat model -> string parser).

    Returns:
        dict: ``{"response": <model answer as a string>}``.

    NOTE(review): the vector store and chain are rebuilt on every request —
    fine for a demo, but in production they should be created once at startup.
    """
    vectorstore = FAISS.from_texts(
        ["Langtrace helps you ship high quality AI Apps to production."],
        embedding=OpenAIEmbeddings(),
    )
    retriever = vectorstore.as_retriever()
    template = """Answer the question based only on the following context:{context}
Question: {question}
"""
    prompt = ChatPromptTemplate.from_template(template)
    model = ChatOpenAI()
    # LCEL: the dict fans the input out — retrieved docs fill {context},
    # the raw invoke() argument passes through as {question}.
    chain = (
        {"context": retriever, "question": RunnablePassthrough()}
        | prompt
        | model
        | StrOutputParser()
    )
    res = chain.invoke("How is Langtrace useful?")
    return {"response": res}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment