@JGalego
Created July 16, 2024 11:27
🕶️ ChatGPT-like application for demoing OpenAI-compatible proxies
"""
ChatGPT-like application for demoing OpenAI-compatible proxies.
Adapted from https://docs.streamlit.io/develop/tutorials/llms/build-conversational-apps
"""
import os
import streamlit as st
from openai import OpenAI
st.title("Agent Smith 🕶️")
st.sidebar.markdown("""
#### What is Agent Smith?
Agent Smith is a ChatGPT-like clone for demoing OpenAI-compatible LLM proxies.
<img src="https://i.pinimg.com/originals/9f/c9/7a/9fc97aab2d432f1e33d2231e6bd6c3d4.gif" width="70%"/>
####
""", unsafe_allow_html=True)
# Initialize client
client = OpenAI(
    base_url=st.secrets["OPENAI_BASE_URL"],
    api_key=st.secrets["OPENAI_API_KEY"]
)
# List models
models = [model.id for model in client.models.list()]
# Select model
model = st.sidebar.selectbox(
label="Select a model",
options=models,
index=models.index(os.environ['DEFAULT_AGENT_SMITH_MODEL'])
if 'DEFAULT_AGENT_SMITH_MODEL' in os.environ else 0
)
# Add examples
st.sidebar.markdown("#### Examples")
examples = [
"Who are you?",
"What is your favorite movie?",
"In a fight between you and the Architect, who would win?",
"Kiss, marry, kill: Trinity, Neo, the Oracle. Go!"
]
for example in examples:
    st.sidebar.code(example, language=None)
# Initialize message history
if "messages" not in st.session_state:
    st.session_state.messages = [{
        'role': "system",
        'content':
            "You are Agent Smith from The Matrix franchise. Act and talk like him. Be theatrical."
    }]
# Display old messages
for message in st.session_state.messages:
with st.chat_message(message["role"]):
st.markdown(message["content"])
# Get user input
if prompt := st.chat_input("You hear that, Mr Anderson? That is the sound of inevitability!"):
    # Update message history
    st.session_state.messages.append({"role": "user", "content": prompt})
    # Display user prompt
with st.chat_message("user"):
st.markdown(prompt)
with st.chat_message("assistant"):
# Call model
stream = client.chat.completions.create(
model=model,
messages=st.session_state.messages,
temperature=0.0,
stream=True,
)
# Display response
response = st.write_stream(stream)
# Update message history
st.session_state.messages.append({"role": "assistant", "content": response})