Skip to content

Instantly share code, notes, and snippets.

@JGalego
Last active November 20, 2023 15:05
Show Gist options
  • Save JGalego/48d744d19fc83fbc2c5455b25f05eac4 to your computer and use it in GitHub Desktop.
A simple Streamlit app to chat with JCVD (powered by Claude v2) using Amazon Bedrock
# pylint: disable=line-too-long
"""
JCVD Chat 🕺💬
A simple Streamlit app to chat with JCVD (powered by Claude v2) using Amazon Bedrock
https://aws.amazon.com/bedrock/claude/
> Adapted from Streamlit's Build conversational apps
https://docs.streamlit.io/knowledge-base/tutorials/build-conversational-apps
Want to learn more about Anthropic Claude? Check out
https://www.anthropic.com/index/introducing-claude
⠀⢰⣶⣶⣶⣶⣶⣶⣶⣶⣶⣶⣶⣶⣶⣶⣶⣶⣶⣶⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
⠀⢸⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
⠀⢸⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⠀⣶⣶⣶⣶⣶⣶⣶⡆⠀
⠀⢸⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⠀⣿⣿⣿⣿⣿⣿⣿⡇⠀
⠀⢸⣿⣿⣿⣿⣧⣤⣿⣿⣧⣼⣿⣿⣤⣿⣿⣿⣿⣿⠀⣿⣿⣿⣿⣿⣿⣿⡇⠀
⠀⢸⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⠀⣿⣿⣿⣿⣿⣿⣿⡇⠀
⠀⠈⠉⠉⠉⢹⣿⣿⡿⠋⣉⣉⣉⣉⣉⣉⣉⣉⣉⣉⣀⣿⣿⣿⣿⣿⣿⣿⡇⠀
⠀⠀⠀⠀⠀⠈⣿⡟⠁⠀⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⡇⠀
⠀⠀⠀⠀⠀⠀⠛⠀⠀⠀⠉⠉⠉⠉⠉⠉⠉⠉⠉⠉⠉⢿⣿⣿⡏⠉⠉⠉⠁⠀
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠈⢻⣿⠁⠀⠀⠀⠀⠀
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠛
"""
import json
import time
import boto3
import streamlit as st
# Page header
st.title("JCVD Chat 🕺💬")
st.subheader("I am the Fred Astaire of chatbots!")

# Make sure the conversation history exists before anything reads it
if "messages" not in st.session_state:
    st.session_state.messages = []

###########################
# Inference configuration #
###########################
st.sidebar.title("Inference Configuration")

# Let the user wipe the conversation on demand
if st.sidebar.button('Clear chat history'):
    st.session_state.messages = []
st.sidebar.subheader("Randomness & Diversity")

# Sampling controls; each widget stores its value in st.session_state[key]
for slider_spec in (
    dict(
        label='Temperature',
        key='temperature',
        help='Amount of randomness injected into the response',
        min_value=0.1,
        max_value=1.0,
        value=1.0,
        step=0.1,
        format='%.1f',
    ),
    dict(
        label='Top P',
        key='top_p',
        help='Use nucleus sampling. In nucleus sampling, we compute the cumulative distribution over all the options for each subsequent token in decreasing probability order and cut it off once it reaches a particular probability specified by top_p',
        min_value=0.0,
        max_value=1.0,
        value=0.999,
        step=0.001,
        format='%.3f',
    ),
    dict(
        label='Top K',
        key='top_k',
        help='Only sample from the top K options for each subsequent token',
        min_value=0,
        max_value=500,
        value=250,
        step=1,
        format='%d',
    ),
):
    st.sidebar.slider(**slider_spec)

st.sidebar.subheader("Length")
st.sidebar.slider(
    label='Length',
    key='length',
    help='The maximum number of tokens to generate before stopping',
    min_value=0,
    max_value=2048,
    value=300,
    step=1,
    format='%d',
)
###########
# Chat UI #
###########

# Initialize Bedrock client
session = boto3.Session()
bedrock_runtime = session.client('bedrock-runtime')

# Replay the conversation so far, one bubble per stored message
for past_message in st.session_state.messages:
    with st.chat_message(past_message["role"]):
        st.markdown(past_message["content"])
# Prompt user for input
if prompt := st.chat_input("What's up, doc?"):
    # Add user prompt to chat history
    st.session_state.messages.append(
        {
            "role": "user",
            "content": prompt
        }
    )

    # Display user prompt
    with st.chat_message("user"):
        st.markdown(prompt)

    # Display assistant response
    with st.chat_message("assistant"):
        message_placeholder = st.empty()
        full_response = ""  # pylint: disable=invalid-name
        # NOTE: the Claude v2 Text Completions API requires the prompt to
        # start with "\n\nHuman:" and end with "\n\nAssistant:" — this also
        # matches the "\n\nHuman:" stop sequence used below.
        response = bedrock_runtime.invoke_model_with_response_stream(
            accept="*/*",
            body=json.dumps(
                {
                    "prompt": f"\n\nHuman: {prompt}\nInstructions: Imagine you are JCVD\n\nAssistant:",
                    "max_tokens_to_sample": st.session_state.length,
                    "temperature": st.session_state.temperature,
                    "top_k": st.session_state.top_k,
                    "top_p": st.session_state.top_p,
                    "stop_sequences": [
                        "\n\nHuman:"
                    ],
                }
            ),
            contentType="application/json",
            modelId="anthropic.claude-v2"
        )

        # Stream the completion chunk by chunk into the placeholder
        stream = response.get('body')
        if stream:
            for event in stream:
                chunk = event.get('chunk')
                if chunk:
                    chunk_obj = json.loads(chunk.get('bytes').decode())
                    full_response += chunk_obj['completion']
                    # Add a blinking cursor to simulate typing
                    time.sleep(0.05)
                    message_placeholder.markdown(full_response + "▌")
        # Final render without the cursor (the original rendered this twice)
        message_placeholder.markdown(full_response)

    # Add assistant response to chat history
    st.session_state.messages.append(
        {
            "role": "assistant",
            "content": full_response
        }
    )
# Greet visitors with a dancing JCVD until the first message arrives
_JCVD_GIF_MARKDOWN = '![](https://media.tenor.com/CVp9l7g3axwAAAAj/jean-claude-van-damme-jcvd.gif)'
if not st.session_state.messages:
    st.markdown(_JCVD_GIF_MARKDOWN)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment