Skip to content

Instantly share code, notes, and snippets.

@JGalego
Last active May 7, 2024 01:37
Show Gist options
  • Save JGalego/6d793f7ca1caec5bcd04edcd6c72f8de to your computer and use it in GitHub Desktop.
Job interviewer πŸ§‘πŸ»β€πŸ’Ό powered by Amazon Bedrock / Claude
r"""
____ _ _
| _ \ | | | |
| |_) | ___ __| |_ __ ___ ___| | __
| _ < / _ \/ _` | '__/ _ \ / __| |/ /
| |_) | __/ (_| | | | (_) | (__| <
|____/ \___|\__,_|_| \___/ \___|_|\_\
|_ _| | | (_)
| | _ __ | |_ ___ _ ____ ___ _____ _____ _ __
| | | '_ \| __/ _ \ '__\ \ / / |/ _ \ \ /\ / / _ \ '__|
_| |_| | | | || __/ | \ V /| | __/\ V V / __/ |
|_____|_| |_|\__\___|_| \_/ |_|\___| \_/\_/ \___|_|
(Streamlit edition)
"""
import os
import uuid
import boto3
import streamlit as st
from langchain_community.chat_message_histories import DynamoDBChatMessageHistory
from langchain_community.chat_models import BedrockChat
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.prompts.chat import MessagesPlaceholder
from langchain.schema import (
AIMessage,
HumanMessage,
)
from PyPDF2 import PdfReader
# Constants
INTERVIEWER_TABLE = "BedrockInterviewer"
# Stable per-machine session ID derived from the hardware (MAC) address,
# so the same machine resumes the same DynamoDB-backed conversation.
SESSION_ID = uuid.UUID(int=uuid.getnode()).hex
DEFAULT_JOB_DESCRIPTION = "job_description.txt"

# Optionally seed the form from a local file: the first line is the job
# role, the remainder is the job description.
if os.path.isfile(DEFAULT_JOB_DESCRIPTION):
    with open(DEFAULT_JOB_DESCRIPTION, 'r', encoding="utf-8") as default_job_description_f:
        lines = default_job_description_f.readlines()
    if lines:
        DEFAULT_JOB_ROLE = lines[0].strip()
        DEFAULT_JOB_DESCRIPTION = "".join(lines[1:])
    else:
        # Empty file: fall back to blank defaults instead of
        # crashing on lines[0] with an IndexError.
        DEFAULT_JOB_ROLE = None
        DEFAULT_JOB_DESCRIPTION = None
else:
    DEFAULT_JOB_ROLE = None
    DEFAULT_JOB_DESCRIPTION = None
# Prompts
# System prompt template consumed by ChatPromptTemplate below; the three
# placeholders ({job_role}, {job_description}, {candidate_cv}) are filled
# from Streamlit session state at chain.invoke() time.
SYSTEM_PROMPT = """
You are going to act as a job interviewer. Your goal is to ask me interview questions for a '{job_role}' position and assess whether I'm a good fit for the role. Ask one question at a time and one question only.
## Job Description
{job_description}
## Candidate CV
{candidate_cv}
"""
# Create a DynamoDB table to hold the conversation
dynamodb_client = boto3.client("dynamodb")
dynamodb = boto3.resource("dynamodb")
try:
    # Single-key (SessionId) table; PAY_PER_REQUEST avoids provisioning
    table = dynamodb.create_table(
        TableName=INTERVIEWER_TABLE,
        KeySchema=[{
            "AttributeName": "SessionId",
            "KeyType": "HASH"
        }],
        AttributeDefinitions=[{
            "AttributeName": "SessionId",
            "AttributeType": "S"
        }],
        BillingMode="PAY_PER_REQUEST",
    )
except dynamodb_client.exceptions.ResourceInUseException:
    # Table already exists (e.g. a previous run created it) - just attach to it
    table = dynamodb.Table(INTERVIEWER_TABLE)

# Wait until the table exists
# (create_table is async; reads/writes before ACTIVE status would fail)
table.meta.client.get_waiter("table_exists").wait(TableName=INTERVIEWER_TABLE)

# Make some history!
# LangChain-managed message log persisted in the DynamoDB table above
history = DynamoDBChatMessageHistory(
    table_name=INTERVIEWER_TABLE,
    session_id=SESSION_ID,
)
if len(history.messages) == 0:
    # Seed a brand-new conversation so the model has a human turn to answer
    history.add_messages([
        HumanMessage(content="I'm ready to start the interview.")
    ])
####################
# Helper Functions #
####################
@st.cache_data
def list_claude_models():
    """Return summaries of the on-demand Anthropic models available in Bedrock.

    Cached by Streamlit so the Bedrock control-plane API is only called
    once rather than on every script rerun.
    """
    bedrock_client = boto3.client("bedrock")
    response = bedrock_client.list_foundation_models(
        byProvider="anthropic",
        byInferenceType="ON_DEMAND",
    )
    return response["modelSummaries"]
def process_cv():
    """Extract the uploaded CV (PDF) into a single string in session state."""
    pdf = PdfReader(st.session_state.uploaded_file)
    page_texts = [page.extract_text().strip() for page in pdf.pages]
    st.session_state.candidate_cv = "".join(page_texts)
def reset_interview():
    """End the current interview and wipe the stored conversation history."""
    # Clearing the DynamoDB-backed history and dropping the flag are
    # independent; together they return the app to the landing page.
    history.clear()
    st.session_state.interview_in_progress = False
def process_chat_history():
    """Convert the LangChain message history into Streamlit chat dicts.

    Returns:
        list[dict]: one {'role': ..., 'content': ...} entry per recognized
        message; messages that are neither AIMessage nor HumanMessage
        (e.g. system messages) are skipped, as in the original if/elif
        chain whose trailing `else: pass` was dead code.
    """
    # Map LangChain message classes onto Streamlit chat roles
    role_by_type = {AIMessage: "assistant", HumanMessage: "human"}
    messages = []
    for message in history.messages:
        for msg_type, role in role_by_type.items():
            if isinstance(message, msg_type):
                messages.append({
                    'role': role,
                    'content': message.content,
                })
                break
    return messages
def show_chat_history():
    """Render every message of the interview so far in the chat pane."""
    for entry in process_chat_history():
        avatar = entry.get("avatar")
        with st.chat_message(entry["role"], avatar=avatar):
            st.markdown(entry["content"])
def start_interview():
    """Validate the interview inputs and flip the app into interview mode.

    Requires an uploaded CV, a non-empty job role, and a non-empty job
    description; shows an error and aborts otherwise. Only when all three
    checks pass is the CV parsed and the interview started.
    """
    if "uploaded_file" not in st.session_state or not st.session_state.uploaded_file:
        st.error("No file uploaded!")
        return
    # Falsy checks instead of `== ""`: when no default job_description.txt
    # exists, the widgets are seeded with None, which the original
    # equality checks let slip through.
    if not st.session_state.job_role:
        st.error("Job role must not be empty!")
        return
    if not st.session_state.job_description:
        st.error("Job description must not be empty!")
        return
    process_cv()
    st.session_state.interview_in_progress = True
########
# Main #
########
st.title("Bedrock Interviewer 🧑🏻‍💼")
# First run of the script in this session: interview has not started yet
if "interview_in_progress" not in st.session_state:
    st.session_state.interview_in_progress = False
# Landing page: short intro + model picker (hidden once the interview starts)
if not st.session_state.interview_in_progress:
    st.markdown("""
**Bedrock Interviewer** is a simple chat application powered by [Amazon Bedrock](https://aws.amazon.com/bedrock/) that mimics a job interviewer. Its goal is to assess whether a candidate is a good fit for a job role by asking interview questions.
""")
    st.session_state.model = st.selectbox(
        label="Model",
        options=list_claude_models(),
        format_func=lambda model: model['modelName']
    )
# Create the prompt templates
# System prompt first, then the running conversation injected via the
# "chat_history" placeholder at invoke time
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", SYSTEM_PROMPT),
        MessagesPlaceholder(variable_name="chat_history"),
    ]
)
# Initialize the chat model
# NOTE: stop sequences must contain real newlines. The original used the
# double-escaped literals "\\n\\nHuman:" (i.e. backslash-n characters),
# which the model never emits, so the stop sequences were effectively dead.
chat = BedrockChat(
    model_id=st.session_state.model['modelId'],
    model_kwargs={
        # Low temperature keeps the interviewer focused and deterministic
        'temperature': 0.1,
        # Cut the model off if it starts hallucinating the candidate's turn
        'stop_sequences': [
            "\n\nHuman:",
            "\n\nCandidate:"
        ]
    }
)
# Create the chain: prompt template piped into the chat model (LCEL)
chain = prompt | chat
# Pre-interview: collect role, description and CV; otherwise run the chat loop
if not st.session_state.interview_in_progress:
    st.session_state.job_role = st.text_input(
        label="Job Role",
        value=DEFAULT_JOB_ROLE,
    )
    st.session_state.job_description = st.text_area(
        label="Job Description",
        value=DEFAULT_JOB_DESCRIPTION,
    )
    st.session_state.uploaded_file = st.file_uploader(
        label="Upload CV",
        type=["pdf"],
    )
    st.button(
        label="Start Interview 🤞",
        on_click=start_interview,
    )
else:
    show_chat_history()
    # It's the interviewer's turn whenever the last message is the
    # candidate's (or the history is somehow empty): generate a question
    if len(history.messages) == 0 or \
       isinstance(history.messages[-1], HumanMessage):
        with st.spinner(f"{st.session_state.model['modelName']} is thinking... 💭"):
            question = chain.invoke({
                'job_role': st.session_state.job_role,
                'job_description': st.session_state.job_description,
                'candidate_cv': st.session_state.candidate_cv,
                'chat_history': history.messages,
            })
            # Persist the AI turn so the next rerun sees an up-to-date history
            history.add_ai_message(question)
        with st.chat_message("assistant"):
            st.markdown(question.content)
    # Candidate's answer: record it, echo it, and rerun so the model replies
    if answer := st.chat_input():
        with st.chat_message("human"):
            history.add_user_message(answer)
            st.markdown(answer)
        st.rerun()
# Sidebar: model picker and reset button, available at all times
st.session_state.model = st.sidebar.selectbox(
    label="Model",
    options=list_claude_models(),
    format_func=lambda model: model['modelName']
)
# NOTE: the button's return value was previously assigned to a variable
# named `reset_interview`, shadowing the callback function of the same
# name; the assignment was unused, so drop it to keep the name unambiguous.
st.sidebar.button(
    label="Reset Interview ⚠️",
    on_click=reset_interview,
)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment