Skip to content

Instantly share code, notes, and snippets.

@BenLiyanage
Created September 29, 2023 21:13
Show Gist options
  • Save BenLiyanage/08796c372688d444c4cd7d7255d319bd to your computer and use it in GitHub Desktop.
Save BenLiyanage/08796c372688d444c4cd7d7255d319bd to your computer and use it in GitHub Desktop.
Lever candidate evaluation script
import ast
import csv
import json

import openai
import requests
from environs import Env
from requests.auth import HTTPBasicAuth
# Initialize environs and OpenAI
env = Env()
env.read_env()  # loads variables from a local .env file, if one is present
openai.api_key = env("OPENAI_API_KEY")
# Constants
LEVER_API_KEY = env("LEVER_API_KEY")
# The Lever posting whose applicants are evaluated; hard-coded for this script.
POSTING_ID = '56db654f-aa40-43f8-a55c-86852bacd064'
# Endpoint, Headers, and Auth
URL_OPPORTUNITIES = "https://api.lever.co/v1/opportunities"
URL_POSTINGS = f"https://api.lever.co/v1/postings/{POSTING_ID}"
# Template URL; filled in with an opportunity id via .format(...)
URL_RESUMES = "https://api.lever.co/v1/opportunities/{}/resumes"
# Lever's API uses HTTP Basic auth: API key as username, empty password.
basic_auth = HTTPBasicAuth(LEVER_API_KEY, '')
def fetch_job_description_from_lever():
    """Fetch the full job posting text for POSTING_ID from Lever.

    Returns:
        str: The posting's description, the text of its requirement
        lists, and its closing section, concatenated with spaces.

    Raises:
        requests.HTTPError: If the Lever API returns a non-2xx status.
    """
    response = requests.get(URL_POSTINGS, auth=basic_auth)
    # Fail fast with a clear HTTP error instead of crashing later on a
    # missing "data" key when the API call did not succeed.
    response.raise_for_status()
    posting_data = response.json()["data"]
    job_description = posting_data["content"]["description"]
    # "lists" holds the bulleted requirement sections of the posting.
    job_requirements = ' '.join(item["text"] for item in posting_data["content"]["lists"])
    job_closing = posting_data["content"]["closing"]
    return job_description + ' ' + job_requirements + ' ' + job_closing
def fetch_resumes_for_applicant(opportunity_id):
    """Return the list of resume records for one Lever opportunity.

    On any non-200 response the error is logged to stdout and an empty
    list is returned so callers can iterate unconditionally.
    """
    resume_url = URL_RESUMES.format(opportunity_id)
    response = requests.get(resume_url, auth=basic_auth)
    if response.status_code != 200:
        print(f"Error {response.status_code} fetching resumes for applicant {opportunity_id}")
        return []
    return response.json()["data"]
def get_resume_data(opportunity_id):
    """Return the parsed data of the applicant's most recent resume.

    Args:
        opportunity_id: Lever opportunity id for the applicant.

    Returns:
        The ``parsedData`` dict of the first (latest) resume, or ``None``
        when the request fails or no parsed resume data is available.
    """
    # Reuse the module-level URL template instead of re-declaring a local
    # upper-case "constant" that shadows URL_RESUMES.
    url = URL_RESUMES.format(opportunity_id)
    response = requests.get(url, auth=basic_auth)
    if response.status_code == 200:
        resumes = response.json()["data"]
        # For simplicity, we'll consider the latest (first) resume in the list.
        if resumes and 'parsedData' in resumes[0]:
            return resumes[0]['parsedData']
    return None
# pprint is used for the debug dumps in evaluate_applicant.
# (The original pasted this import twice; once is enough.)
import pprint
def evaluate_applicant(applicant, job_description):
    """Score one applicant against the job description using the OpenAI API.

    Args:
        applicant: Lever opportunity dict; must contain 'id', 'headline',
            and 'urls' -> 'show'.
        job_description: Full job posting text to evaluate against.

    Returns:
        dict with keys 'url', 'classification', 'justification', and
        'email_content'. When the model's reply cannot be parsed, the
        classification falls back to "Unknown".
    """
    # Fetch resume data for the applicant
    resume_data = get_resume_data(applicant["id"])
    evaluation_data = f"Applicant Profile: {applicant['headline']} "
    if resume_data:
        # TODO Add job duration to the profile
        for position in resume_data["positions"]:
            evaluation_data += f"Position at {position['org']}: {position['title']} {position['summary']} "
        for school in resume_data["schools"]:
            evaluation_data += f"Education at {school['org']}: {school['degree']} in {school['field']} {school['summary']} "
    # Direct Lookup URL
    lookup_url = applicant['urls']['show']
    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "system", "content": f"Job Description: {job_description}"},
        {"role": "system", "content": f"Provide a structured response in valid JSON format with keys and string values enclosed in double quotes. This should be directly parsable using Python's json.loads() method. Do not escape single quotes. The response should have this structure: {{\"classification\": \"One of ['Strong Match', 'Weak Match', 'Not Qualified', 'Manual Review Needed']\", \"justification\": \"A short sentence justifying this candidates strengths.\", \"email_content\": \"A concise email message\"}}"},
        {"role": "user", "content": f"Evaluate the following profile: {evaluation_data}"},
    ]
    # Debug print before making the OpenAI call
    print("\n\nDebug Info:")
    pprint.pprint(messages)
    response = openai.ChatCompletion.create(model="gpt-4-0613", messages=messages)
    response_text = response.choices[0].message['content'].strip()
    print(f"\n\nResponse: {response_text}")
    try:
        # The prompt asks for json.loads()-parsable output, so parse JSON
        # first (ast.literal_eval rejects JSON's true/false/null); fall
        # back to a Python-literal parse for near-miss replies.
        try:
            structured_response = json.loads(response_text)
        except ValueError:
            structured_response = ast.literal_eval(response_text)
        classification = structured_response["classification"]
        justification = structured_response["justification"]
        email_content = structured_response["email_content"]
    except (ValueError, SyntaxError, KeyError, TypeError):
        # Default fallback in case the LLM does not return the expected structure
        classification = "Unknown"
        justification = "The LLM did not provide a valid structured response."
        email_content = ""
    result = {
        "url": lookup_url,
        "classification": classification,
        "justification": justification,
        "email_content": email_content
    }
    print("\n\nResult:")
    pprint.pprint(result)
    return result
def main():
    """Fetch new applicants for the posting, evaluate each, and write a CSV."""
    job_description = fetch_job_description_from_lever()
    params = {"posting_id": POSTING_ID, "stage_id": "applicant-new", "pipeline": "applicant"}
    response = requests.get(URL_OPPORTUNITIES, auth=basic_auth, params=params)
    if response.status_code != 200:
        print(f"Error {response.status_code}")
        return
    applicants = response.json()["data"]
    # Column order matches the keys produced by evaluate_applicant.
    fieldnames = ["url", "classification", "justification", "email_content"]
    # Open a CSV file in write mode
    with open("evaluations.csv", "w", newline='') as csv_file:
        writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
        # Write the headers to the CSV file
        writer.writeheader()
        for applicant in applicants:
            evaluation = evaluate_applicant(applicant, job_description)
            # Write the evaluation to the CSV file
            writer.writerow(evaluation)
            print(evaluation)  # Print the evaluation to the console
# Run the full evaluation pipeline only when executed as a script.
if __name__ == "__main__":
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment