Code gists by Harshit Ahluwalia (harshitahluwalia7895)
import os

import pandas as pd
import psycopg2
import requests
import streamlit as st
from uuid import uuid4

from langchain.prompts import ChatPromptTemplate
from langchain.prompts.chat import SystemMessage, HumanMessagePromptTemplate
from langchain.llms import OpenAI, AzureOpenAI
from langchain.chat_models import ChatOpenAI, AzureChatOpenAI

# The three imports below are not in the original previews but are required by
# the CSVLoader / Chroma / embeddings calls used further down.
from langchain.document_loaders import CSVLoader
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma
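# Minimal setup sketch (not part of the original gists): the snippets below use
# `embeddings` and `unique_id` without defining them, so one plausible
# initialization is an OpenAIEmbeddings instance plus a per-session UUID.
# The OPENAI_API_KEY environment variable is assumed to be set.
embeddings = OpenAIEmbeddings()
unique_id = uuid4().hex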
def create_sql_query_template(query, relevant_tables, table_info):
    tables_list = ",".join(relevant_tables)
    chat_template = ChatPromptTemplate.from_messages([
        SystemMessage(
            content=(
                f"As an assistant capable of composing SQL queries, please write a query that resolves the user's inquiry based on the text provided. "
                f"Consider SQL tables named '{tables_list}'. "
                f"Below is a detailed description of these table(s): "
                f"{table_info} "
                "Enclose the SQL query within three backticks '```' for proper formatting."
            )
        ),
        # The preview cuts off here; a human-message slot for the user's question
        # is the usual counterpart (assumed completion).
        HumanMessagePromptTemplate.from_template("{text}"),
    ])
    return chat_template.format_messages(text=query)
# Load the persisted vector store of table descriptions and retrieve the
# documents most relevant to the user's query.
vectordb = Chroma(embedding_function=embeddings, persist_directory=f"./vectors/tables_{unique_id}")
retriever = vectordb.as_retriever()
docs = retriever.get_relevant_documents(query)
print(docs)  # debug: inspect which table descriptions were retrieved

# Collect the relevant table names and their column details from the retrieved docs.
relevant_tables = []
relevant_table_details = []
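# Sketch of one way to fill the two lists above. It assumes each retrieved
# document's page_content is a row from the table-metadata CSV shaped like
# "Table: users\nColumns: id, name, ..." -- adjust the parsing to the real schema.
for doc in docs:
    lines = doc.page_content.splitlines()
    table_name = next((l.split(":", 1)[1].strip() for l in lines if l.lower().startswith("table")), None)
    if table_name:
        relevant_tables.append(table_name)
        relevant_table_details.append(doc.page_content)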
def generate_sql_query_prompt(query, db_uri):
    # Configure the chat template with system and human messages.
    prompt_template = ChatPromptTemplate.from_messages([
        SystemMessage(
            content=(
                "As an assistant tasked with writing SQL queries, create a SQL query based on the text below. "
                "Enclose the SQL query within three backticks '```' for clarity. "
                "Aim to use 'SELECT' queries as much as possible. "
                f"The connection string for the database is {db_uri}."
            )
        ),
        # Assumed completion of the truncated preview: a slot for the user's question.
        HumanMessagePromptTemplate.from_template("{text}"),
    ])
    return prompt_template.format_messages(text=query)
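# Complementary sketch (assumed): once a SQL string has been extracted from the
# model's reply, psycopg2 and pandas (both imported above but unused in the
# previews) can execute it and return a DataFrame for display in Streamlit.
def run_query_on_postgres(sql, db_uri):
    conn = psycopg2.connect(db_uri)
    try:
        return pd.read_sql_query(sql, conn)
    finally:
        conn.close()

# Example: st.dataframe(run_query_on_postgres(generated_sql, db_uri))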
def check_user_intent_for_database_info_or_sql(query):
    # Define a template for the intent-classification conversation.
    prompt_template = ChatPromptTemplate.from_messages([
        SystemMessage(
            content=(
                "Based on the provided text, the user is asking a question about databases. "
                "Determine if the user seeks information about the database schema or if they want to write a SQL query. "
                "Respond with 'yes' if the user is seeking information about the database schema and 'no' if they intend to write a SQL query."
            )
        ),
        # Assumed completion of the truncated preview: the user's question goes here.
        HumanMessagePromptTemplate.from_template("{text}"),
    ])
    return prompt_template.format_messages(text=query)
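# Sketch of how the intent check might drive the app flow (assumed wiring, not
# from the original gist): classify the question first, then either describe
# the schema or move on to SQL generation.
def handle_user_question(query):
    chat = ChatOpenAI(temperature=0)
    verdict = chat(check_user_intent_for_database_info_or_sql(query)).content.strip().lower()
    if verdict.startswith("yes"):
        return "schema_info"    # caller shows table/column descriptions
    return "sql_generation"     # caller proceeds to generate_sql_query_prompt(...)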
def generate_embeddings(filename, storage_folder):
    # Load each CSV row as a document, embed it, and persist the Chroma index
    # to disk so it can be reloaded later from the same persist_directory.
    csv_loader = CSVLoader(file_path=filename, encoding="utf8")
    dataset = csv_loader.load()
    vector_database = Chroma.from_documents(dataset, embedding=embeddings, persist_directory=storage_folder)
    vector_database.persist()
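# Usage sketch (assumed Streamlit wiring, not from the original gist): let the
# user upload the table-metadata CSV, save it, then build the index in the
# directory the retrieval snippet above reads from.
uploaded = st.file_uploader("Upload table metadata CSV", type="csv")
if uploaded is not None:
    csv_path = f"./uploads/{unique_id}.csv"
    os.makedirs(os.path.dirname(csv_path), exist_ok=True)
    with open(csv_path, "wb") as f:
        f.write(uploaded.getbuffer())
    generate_embeddings(csv_path, f"./vectors/tables_{unique_id}")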
!pip install openai==1.12.0
!pip install gradio==4.19.0
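# Minimal sketch of what these pinned versions enable (an assumption; the
# original preview shows only the install lines): a Gradio ChatInterface backed
# by the OpenAI 1.x client. OPENAI_API_KEY is read from the environment.
import gradio as gr
from openai import OpenAI

client = OpenAI()

def reply(message, history):
    # gr.ChatInterface passes history as [[user, assistant], ...] pairs.
    messages = []
    for user_turn, bot_turn in history:
        messages.append({"role": "user", "content": user_turn})
        messages.append({"role": "assistant", "content": bot_turn})
    messages.append({"role": "user", "content": message})
    completion = client.chat.completions.create(model="gpt-3.5-turbo", messages=messages)
    return completion.choices[0].message.content

gr.ChatInterface(reply).launch()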