Skip to content

Instantly share code, notes, and snippets.

@truevis
Last active April 24, 2024 11:27
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save truevis/c1a447835fbdd0c64f0836228d178e66 to your computer and use it in GitHub Desktop.
Groq API chatbot using Llama3
import os

import streamlit as st

from langchain_core.prompts import ChatPromptTemplate
from langchain_groq import ChatGroq
def generate_response(user_input):
    """Stream the assistant's reply for the current conversation.

    Yields response text chunks as they arrive from the model, with "$"
    escaped so Streamlit's markdown renderer does not treat it as LaTeX.

    NOTE: `user_input` is intentionally unused here — the full chat
    history in st.session_state.messages (which already contains the
    latest user turn; see the chat-input handler) is sent to the model
    instead. The parameter is kept for interface compatibility.

    Relies on the module-level `prompt` (ChatPromptTemplate) and `chat`
    (ChatGroq) objects defined elsewhere in this script.
    """
    chain = prompt | chat
    # Flatten the (role, message) history into a single "role: msg" transcript.
    history = "\n".join(f"{role}: {msg}" for role, msg in st.session_state.messages)
    for chunk in chain.stream({"text": history}):
        content = chunk.content
        # Escape "$" so markdown does not render stray LaTeX.
        content = content.replace("$", "\\$")
        if content:
            yield content
st.title("Helpful Assistant")

# Initialize the chat history once per browser session.
if "messages" not in st.session_state:
    st.session_state.messages = []

# Streamlit reruns the whole script on every interaction, so replay all
# previous (role, message) turns to keep the conversation visible.
for role, message in st.session_state.messages:
    with st.chat_message(role):
        st.markdown(message)
# SECURITY: never commit a real API key to source control. Prefer the
# GROQ_API_KEY environment variable (or st.secrets); the literal below is
# only a placeholder fallback for backward compatibility.
YOUR_API_KEY = os.environ.get("GROQ_API_KEY", "gsk_123")

# Llama 3 70B via Groq; temperature 0.5 for moderately creative replies.
chat = ChatGroq(temperature=0.5, groq_api_key=YOUR_API_KEY, model_name="llama3-70b-8192")

# Simple two-part prompt: fixed system persona + the serialized transcript
# injected as {text} by generate_response().
system = "You are a helpful assistant."
human = "{text}"
prompt = ChatPromptTemplate.from_messages([("system", system), ("human", human)])
# Handle a newly submitted user message.
if user_prompt := st.chat_input("What can I help you with?"):
    # Record the user turn BEFORE streaming, so generate_response() sees it
    # as part of the history it sends to the model.
    st.session_state.messages.append(("user", user_prompt))
    print(st.session_state.messages)  # debug: dump history to the console
    st.chat_message("user").markdown(user_prompt)
    with st.spinner("Generating response..."):
        # write_stream renders chunks live and returns the full joined text.
        response = st.write_stream(generate_response(user_prompt))
    st.session_state.messages.append(("assistant", response))
@truevis
Copy link
Author

truevis commented Apr 24, 2024

image

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment