@0187773933 · Created February 29, 2024 02:20
Code In Phrase Generator
#!/usr/bin/env python3
import sys
import nltk
from nltk.sentiment import SentimentIntensityAnalyzer
from pathlib import Path
from pprint import pprint
import pickle
import nacl.secret # pip install pynacl
import nacl.utils
import base64
import json
import random
import copy
import pandas as pd
import matplotlib.pyplot as plt
# https://colab.research.google.com/drive/1eeOvgqa4HJlu_pY9ljnbm_nLUoYuev5p?usp=sharing
def write_json( file_path , python_object ):
    with open( file_path , 'w' , encoding='utf-8' ) as f:
        json.dump( python_object , f , ensure_ascii=False , indent=4 )

def read_json( file_path ):
    with open( file_path ) as f:
        return json.load( f )

def write_pickle( file_path , python_object ):
    with open( file_path , "wb" ) as f:
        pickle.dump( python_object , f , protocol=pickle.HIGHEST_PROTOCOL )

def read_pickle( file_path ):
    with open( file_path , "rb" ) as f:
        return pickle.load( f )
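# Usage sketch for the serialization helpers above (file names are illustrative):
#   write_json( "word_table.json" , { "Prompt": [ "apple" , "berry" ] } )
#   table = read_json( "word_table.json" )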
def nacl_gen_key():
    key = nacl.utils.random( nacl.secret.SecretBox.KEY_SIZE )
    key_b64 = base64.b64encode( key ).decode( "ascii" )
    return key_b64

def nacl_encrypt( key_b64 , plain_text_message ):
    key = base64.b64decode( key_b64 )
    box = nacl.secret.SecretBox( key )
    encrypted = box.encrypt( bytes( plain_text_message , "ascii" ) )
    encrypted_b64 = base64.b64encode( encrypted ).decode( "ascii" )
    return encrypted_b64

def nacl_decrypt( key_b64 , encrypted_message ):
    key_bytes = base64.b64decode( key_b64 )
    encrypted_message_bytes = base64.b64decode( encrypted_message )
    box = nacl.secret.SecretBox( key_bytes )
    plaintext = box.decrypt( encrypted_message_bytes ).decode( "ascii" )
    return plaintext
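# Round-trip sketch for the SecretBox helpers above (key name and message are illustrative):
#   key_b64 = nacl_gen_key()
#   token_b64 = nacl_encrypt( key_b64 , "normal code phrase" )
#   assert nacl_decrypt( key_b64 , token_b64 ) == "normal code phrase"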
def make_prompt_response_list( list_length=50 , word_minimum_length=6 , sentiment_threshold=0.05 , prompt_name="Code In" , response_a_name="Normal" , response_b_name="Duress" ):
    # Load the Brown corpus word list
    # nltk.download('brown')
    # nltk.download('vader_lexicon')
    # nltk.download( "words" , download_dir=Path.cwd().joinpath( "english_dictionary" ) )
    # word_list = nltk.corpus.words.words()
    word_list = nltk.corpus.brown.words()
    # Remove non-alphabetic words and duplicates, then convert to lowercase
    word_list = list( set( word.lower() for word in word_list if word.isalpha() ) )
    # Keep only words VADER scores at or above the positivity threshold
    sia = SentimentIntensityAnalyzer()
    positive_word_list = []
    for word in word_list:
        score = sia.polarity_scores( word )
        if score[ "compound" ] >= sentiment_threshold:
            positive_word_list.append( word )
    word_list = positive_word_list
    # Filter words by the minimum length requirement
    word_list = [ word for word in word_list if len( word ) >= word_minimum_length ]
    # Shuffle with a cryptographically secure RNG to ensure randomness
    RandomGenerator = random.SystemRandom()
    code_in_list = word_list[:]
    RandomGenerator.shuffle( code_in_list )
    # Extract non-overlapping word slices for the prompt and two response types;
    # the DataFrame below requires three full slices, so check we have enough words
    assert len( code_in_list ) >= 3 * list_length , "not enough words after filtering"
    prompt_words = code_in_list[ :list_length ]
    response_a_words = code_in_list[ list_length:2*list_length ]
    response_b_words = code_in_list[ 2*list_length:3*list_length ]
    # Create a DataFrame using the specified column names
    df = pd.DataFrame({
        prompt_name: prompt_words ,
        response_a_name: response_a_words ,
        response_b_name: response_b_words ,
    })
    # Generate and save a table image
    plt.figure( figsize=( 10 , 8 ) )
    plt.axis( 'off' )
    table = plt.table( cellText=df.values , colLabels=df.columns , loc='center' , cellLoc='center' )
    table.auto_set_font_size( True )
    table.scale( 1 , 1.4 )
    # Bold the header row
    for ( row , col ) , cell in table.get_celld().items():
        if row == 0:
            cell.set_text_props( weight="bold" , color="black" )
    plt.savefig( 'prompt_response_list.png' , bbox_inches='tight' , dpi=300 )
if __name__ == "__main__":
    make_prompt_response_list(
        list_length=20 ,
        word_minimum_length=4 ,
        sentiment_threshold=0.05 ,
        prompt_name="Prompt" ,
        response_a_name="Yes" ,
        response_b_name="No"
    )
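First-run setup, a minimal sketch: the script assumes the Brown corpus and the VADER lexicon are already present locally (the commented-out nltk.download calls above), so download them once before the first run:

import nltk
nltk.download( "brown" )
nltk.download( "vader_lexicon" )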