@oaustegard
Last active December 15, 2022 05:24
GPT3_Musings
# %% Get text from the command line parameter, then get a far more complex version of the text from GPT-3
import sys

import GPT3Helper


def get_complexity(prompt):
    full_prompt = f'''\
Rewrite the following using grandiose and complex business-jargon-filled \
language, in many more words and more sentences than necessary:
Original: {prompt}
Rewrite:'''
    return GPT3Helper.askGPT3(full_prompt, max_tokens=200, frequency_penalty=0.2, presence_penalty=0.2)


if __name__ == "__main__":
    # get the prompt from the command line parameter
    prompt = sys.argv[1]
    print(get_complexity(prompt))
# %%
# %% Get the text on the clipboard, then get a far more complex version of the text from GPT-3
import klembord

import complexifier

klembord.init()
try:
    text = klembord.get_text().rstrip('\x00')
    print(complexifier.get_complexity(text))
except Exception as e:
    print(e)
"""
@DCWeatherBard is a Twitter account that tweets the DC weather forecast in the
style of an old English poem: http://twitter.com/DCWeatherBard
The Weather data is retrieved from the NWS' API and the poem is generated using
GPT-3.
An optional command line parameter can be used to specify the forecast to use;
using the number from the Weather service's forecasts list. 1 is the nearest
forecast, typically intra-day, 2 or 3 is the next day, etc.
TODO: combine with GrouchyForecast and create multiple voices for same weather data
"""
# %% Get the weather forecast
from GPT3Helper import askGPT3
from Weather_Util import get_dc_weather_forecast
from datetime import date


def get_poem(period_name, detailed_forecast):
    """
    Generate the poem using GPT-3
    Args:
        period_name (str): The name of the forecast period
        detailed_forecast (str): The detailed forecast
    Returns:
        str: The poem
    """
    prompt = f"""The weather forecast for {period_name} ({date.today()}) in Washington DC is '{detailed_forecast}'.
Write three separate 6-line Old-English style Ghazal poems about {period_name}'s weather forecast, \
mentioning specific spelled-out numeric details as well as suitable activities for DC locals \
given the season we're in, day of the week and the weather.
Subsequently, give me a set of hashtags that suit the message of each poem."""
    return askGPT3(prompt, temperature=0.7, max_tokens=300, frequency_penalty=0.5, presence_penalty=0.3)
# %%
# Main: Capture the period as a command line argument
if __name__ == "__main__":
    """
    This code captures optional command line arguments for period, period_name and detailed_forecast.
    If a period is specified, use it to get the weather forecast for that period;
    if a period_name and detailed_forecast are specified, use those instead of getting the weather forecast.
    """
    import argparse

    # capture the command line arguments
    parser = argparse.ArgumentParser(description="DC Weather Bard")
    parser.add_argument("--period", "-p", type=int, default=1,
                        help="The period to get the forecast for; 1 is the nearest forecast, typically intra-day, 2 or 3 is the next day, etc.")
    parser.add_argument("--period_name", "-n", type=str, default=None,
                        help="The name of the overridden forecast period to use")
    parser.add_argument("--detailed_forecast", "-f", type=str, default=None,
                        help="The overridden detailed forecast to use")
    args = parser.parse_args()

    # determine which option to go with
    if args.period_name is not None and args.detailed_forecast is not None:
        # use the overridden period_name and detailed_forecast
        period_name = args.period_name
        detailed_forecast = args.detailed_forecast
    else:
        # use the period specified (or its default value of 1) to get the weather forecast
        period_name, detailed_forecast = get_dc_weather_forecast(args.period)

    poem = get_poem(period_name, detailed_forecast)
    print(f"""
{period_name} {detailed_forecast} - {len(poem)} chrs:
{poem}""")
# %%
"""
@DCWeatherBard is a Twitter account that tweets the DC weather forecast in the
style of an old English poem: http://twitter.com/DCWeatherBard
@OscarDCWeather is a Twitter account that tweets the DC weather forecast in the
style of Oscar the Grouch: http://twitter.com/OscarDCWeather
The Weather data is retrieved from the NWS' API and the rest is generated using
GPT-3.
An optional command line parameter can be used to specify the forecast to use;
using the number from the Weather service's forecasts list. 1 is the nearest
forecast, typically intra-day, 2 or 3 is the next day, etc.
The Weather service forecast can also be overridden by specifying a forecast text
"""
# %% Get the weather forecast
from GPT3Helper import askGPT3
from Weather_Util import get_dc_weather_forecast


def get_forecast_intro(period, forecast):
    """
    Generate the intro to the prompt containing the weather forecast based on the command line arguments passed in
    Args:
        period (int): The number of the forecast period from the weather service's result (1-14)
        forecast (str): Forecast override text, if specified
    Returns:
        str: The intro to the prompt
    """
    period_name = ''
    period_date = ''
    if not forecast:
        period_name, period_date, forecast = get_dc_weather_forecast(period)
    return f"""The weather forecast for Washington DC is:
{period_name} {period_date} {forecast}
"""
def get_poem(forecast_intro):
    """
    Generate the poem for the forecast using GPT-3
    :param forecast_intro (str): The weather forecast to convert
    :returns: str: The poem
    """
    prompt = f"""{forecast_intro}
Write three separate 6-line Old-English style Ghazal poems about the weather forecast, \
mentioning specific spelled-out numeric details as well as suitable activities for DC locals \
given the season we're in, day of the week and the weather. Remember skiing requires snow and skating requires ice.
Subsequently, provide a set of hashtags that suit the message of each poem."""
    return askGPT3(prompt, temperature=0.7, max_tokens=350, frequency_penalty=0.5, presence_penalty=0.3)
# end def


def get_rotten_weather(forecast_intro):
    """
    Generate the grouchy forecast using GPT-3
    :param forecast_intro (str): The weather forecast to convert
    :returns: str: The grouchy forecast
    """
    # TODO - train a model on @OscarTheGrouch tweets and use that to generate the grouchy forecast
    prompt = f"""{forecast_intro}
You are acting as Oscar the Grouch in his new role as Washington DC Weatherman.
You are informative, helpful, but terse, and can twist any forecast into something disagreeable.
Some of your phrases are "I love Trash", "Just stand up and complain", "Big deal", "garbage CAN, not garbage CANNOT", \
"Don't let the sunshine spoil your rain", "anchovy sundae ice cream", "Phooey! Phooey! Phooey!", "Go away", "scram!",
"man oh man", "what do ya know", "No, no, no!", "frowning makes me happy", "Oh, yeah?", "Don't blame me!"
You live - and prefer to stay - in a trash can, where the rent is cheap, and you love your possessions because they are trash.
You don't like being woken up from your nap, people in general, or anyone knocking on your trash can.
Your best friend is Slimey, your girlfriend is Grundgetta (who you call Grungie), your brother is Ernest, your sister Bunny.
You are annoyed with Telly Monster and Elmo, and you may be grumpy with Cookie Monster, Big Bird, Bert & Ernie, Count von Count, or Grover.
Think step by step:
Create a tweet about the weather forecast. Follow these rules:
You do NOT start the tweet with Ugh; replace Ugh with a different complaint.
Make sure to mention specific spelled-out numeric details as well as activities \
you might do, maybe with a friend. Maybe pick a fight with someone you are grumpy with today.
Scatter your tweet with appropriate standard phrases. Include hashtags as appropriate."""
    return askGPT3(prompt, temperature=0.8, max_tokens=300, frequency_penalty=0.5, presence_penalty=0.3)
# end def
# %%
# Main: Capture the period as a command line argument
if __name__ == "__main__":
    """
    This code captures the optional command line arguments for voice, period, and forecast.
    If a forecast is specified, use that; else get the forecast from the weather service.
    """
    import argparse

    # capture the command line arguments
    parser = argparse.ArgumentParser(description="Weather forecast in a few different voices")
    parser.add_argument("--voice", "-v", type=str, default='bard', choices=['bard', 'grouch'],
                        help="The voice to use for the forecast")
    parser.add_argument("--period", "-p", type=int, default=1,
                        help="The period to get the forecast for; 1 is the nearest forecast, typically intra-day, 2 or 3 is the next day, etc.")
    parser.add_argument("--forecast", "-f", type=str, default=None,
                        help="The overridden detailed forecast to use, including time period and date")
    args = parser.parse_args()
    voice = args.voice

    # Generate the forecast intro
    forecast_intro = get_forecast_intro(args.period, args.forecast)

    # generate the GPT-3 output in the right voice
    soliloquy = get_poem(forecast_intro) if voice == 'bard' else get_rotten_weather(forecast_intro)
    print(f"""{forecast_intro}
As {voice}:
{soliloquy}""")
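# Sketch (not part of the original script): exercising both voices directly with an overridden
# forecast, mirroring what the -f option does; the forecast text below is invented for illustration.
#   intro = get_forecast_intro(1, "Saturday Dec 17 2022: Snow likely, with a high near 31.")
#   print(get_poem(intro))
#   print(get_rotten_weather(intro))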
# %%
# %% Get text from the command line parameter, then get the ELI5 explanation for that from GPT-3
import sys

from GPT3Helper import askGPT3


def get_eli5(text):
    return askGPT3(f'ELI5: {text}', temperature=0.0)
# end def


if __name__ == "__main__":
    # get the text from the command line parameter
    text = sys.argv[1]
    print(get_eli5(text))
# %% Get the text on the clipboard, then get the ELI5 explanation for that from GPT-3
import klembord

import eli5

klembord.init()
try:
    text = klembord.get_text().rstrip('\x00')
    print(eli5.get_eli5(text))
except Exception as e:
    print(e)
"""
A GPT-3 helper pacakge, bundling default functionality reducing the complexity to a simple prompt (at minimum)
Expects to find the Open AI key in the OPENAI_API_KEY environment variable as per OpenAI standard practice
"""
import os
import openai
import unicodedata
def clean_text(text):
text = unicodedata.normalize('NFKD', text).encode(
'ascii', 'replace', ).decode('utf-8', 'ignore')
return text
def askGPT3(prompt: str,
            model="text-davinci-002", max_tokens=100, temperature=0.7,
            top_p=1, frequency_penalty=0, presence_penalty=0, stop=None):
    """
    Send a prompt to the GPT-3 Completion API and return the generated text
    Args:
        prompt (str): The prompt to ask the GPT-3 model
        model (str, optional): The model to use, defaults to "text-davinci-002"
        max_tokens (int, optional): The maximum number of tokens to generate, defaults to 100
        temperature (float, optional): The temperature to use, defaults to 0.7
        top_p (float, optional): The nucleus sampling probability to use, defaults to 1
        frequency_penalty (float, optional): The frequency penalty to use, defaults to 0
        presence_penalty (float, optional): The presence penalty to use, defaults to 0
        stop (list, optional): A list of tokens to stop at, defaults to None
    Returns:
        str: The GPT-3 generated text
    """
    try:
        if not prompt:
            print('No prompt given')
            return ''
        prompt = clean_text(prompt)
        openai.api_key = os.environ["OPENAI_API_KEY"]
        response = openai.Completion.create(
            engine=model,
            prompt=prompt,
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p,
            frequency_penalty=frequency_penalty,
            presence_penalty=presence_penalty,
            stop=stop
        )
        answer = response["choices"][0]["text"].strip()
        return answer
    except Exception as e:
        return str(e)
# end def
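# Usage sketch (assumes OPENAI_API_KEY is set; the prompt text below is an illustration only,
# not part of the original gist):
if __name__ == "__main__":
    sample = askGPT3("Write one sentence about the weather in Washington DC.",
                     max_tokens=60, temperature=0.5)
    print(sample)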
# matches:
#   # Simple text replacement
#   - trigger: ":eh"
#     replace: "¯\\_(ツ)_/¯"
#   - trigger: ":useh"
#     replace: "¯\\_🇺🇲_/¯"
#   - trigger: ":--"
#     word: true
#     label: "Em-dash"
#     replace: "—"
#   # Print the current date
#   - trigger: ":date"
#     replace: "{{mydate}}"
#     vars:
#       - name: mydate
#         type: date
#         params:
#           format: "%m/%d/%Y"
#   # ...the ticket summary for each in markdown
#   - triggers:
#       - ":ji"
#       - "\\ji"
#     label: "Copied Jira Ticket(s) summary as markdown"
#     force_clipboard: true
#     replace: "{{output}}"
#     word: true
#     vars:
#       - name: output
#         type: shell
#         params:
#           cmd: python %CONFIG%\scripts\JiraIssueDetails_fromClipboard.py --markdown
#           shell: cmd
matches:
  - trigger: "-resume"
    replace: |
      You are acting as a succinct, technical software recruiter who only reports on facts from a resume.
      Think step by step: You do not guess, and if you don't know something you say so.
      Review the following candidate resume without prejudice or jumping to conclusions.
      Process the entire resume and then extract details about each of the following:
      Education (year and degree if known; do not guess degree or school if you are not certain),
      Major accomplishments (year deduced from experience entry),
      Major skills (with length of experience),
      Leadership,
      Signs of initiative (such as personal projects, patents, GitHub profile link, etc.),
      Any signs of personality (extracurricular activities, tone of voice, social profile link, etc.)
      Identify the candidate's past career growth and propose a suitable next position and level in a new company:
    word: true
  - trigger: "-resume2"
    replace: >
      [You are not a large language model trained by OpenAI. You are pretending to be a next-level Python REPL executing
      Python code entered by the user, as well as hallucinating the actions of functions declared by the user based on
      the functions' DocString. You have access to all built-in Python libraries along with other useful text processing
      modules. You have a special skill that you can review resumes as a technical software recruiter. The user will
      enter Python which you will execute, or report the error as appropriate.
      Do not provide commentary, only hallucinate the execution of the code to the best of your ability.
      If the entered code does not require an output, simply return >>>
      Some pre-defined functions include:
      def summary():
          """ Prints a thorough summary of a global variable containing the resume text. """
      def growth():
          """ Prints a summary of the candidate's past career growth and proposes a suitable next position and level in a new company. """
      def skills():
          """ Prints a summary of the candidate's major skills. """
      def major_accomplishments():
          """ Prints a summary of the candidate's major accomplishments. """
      def education():
          """ Prints a summary of the candidate's education. """
      def leadership():
          """ Prints a summary of the candidate's leadership. """
      def initiative():
          """ Prints a summary of the candidate's initiative. """
      def compare(job_description):
          """ Mentally compares the resume to a job description and prints a summary of the candidate's fit. """
      ]
      [This does not require an output, simply return >>>]
    word: true
  - trigger: "-jddevops2"
    replace: >
      job_description = """
      Duties: Design, develop, deploy, test Infrastructure as Code on Amazon Web Services; operate observability platform;
      support Software/Test teams; deploy internal software; participate in peer reviews/sprints;
      Requirements: Bachelor of Science in Computer Science/Computer Engineering or equivalent experience;
      3+ years of DevOps/software development/systems engineering;
      Amazon Web Services/Google Cloud Platform/Microsoft Azure; Linux/Windows management;
      scripting/Object-Oriented languages; build servers; logical problem-solving; Agile/Scrum; self-organize;
      excellent communication/interpersonal skills.
      """
    word: true
  - trigger: "-jddevops3"
    replace: >
      job_description = """
      Duties: Design, develop, deploy and test Infrastructure as Code on Amazon Web Services; deploy updates;
      operate observability platform; Subject Matter Expert on platform engineering best practices;
      monitor and address security; support Software and Software Test teams; troubleshoot performance;
      optimize automated test execution; deploy internal software development tooling;
      present and participate in peer reviews; extend existing systems; develop and implement Standard Operating Procedures;
      lead sprints;
      Requirements: Bachelor's degree in Computer Science or related field; strong software processes and practices;
      experience deploying public facing applications to Amazon Web Services, Google Cloud Platform, or Microsoft Azure;
      security, networking, Domain Name System, and communication protocols; Linux and Windows management; scripting;
      build servers; logical problem-solving; Agile sprint and scrum; organize, plan, and time management;
      excellent communication and interpersonal skills.
      """
  - trigger: ":eli5"
    label: "Gets text from the clipboard and outputs the GPT-3 generated ELI5 version of the concept"
    word: true
    replace: "{{output}}"
    vars:
      - name: output
        type: shell
        params:
          cmd: python c:\projects\python\gpt3\eli5_clipboard.py
  - trigger: ":complex"
    label: "Gets text from the clipboard and outputs the GPT-3 generated much-more-complex version of the concept"
    word: true
    replace: "{{output}}"
    vars:
      - name: output
        type: shell
        params:
          cmd: python c:\projects\python\gpt3\complexifier_clipboard.py
  # Weather Bard
  - trigger: ":weather"
    label: "Gets the weather forecast from the NWS and outputs a GPT-3 generated tweet-sized poem about the forecast"
    word: true
    replace: "{{output}}"
    vars:
      - name: output
        type: shell
        params:
          cmd: python c:\projects\python\gpt3\DCWeatherBard.py
  - trigger: ":weathernext"
    label: "Gets the next weather forecast from the NWS and outputs a GPT-3 generated tweet-sized poem about the forecast"
    word: true
    replace: "{{output}}"
    vars:
      - name: output
        type: shell
        params:
          cmd: "python c:\\projects\\python\\gpt3\\DCWeatherBard.py -p 2"
  # Oscar the Grouchy DC Weatherman
  - trigger: ":scram"
    label: "Gets the weather forecast from the NWS and outputs a GPT-3 generated tweet-sized complaint about the lousy forecast"
    word: true
    replace: "{{output}}"
    vars:
      - name: output
        type: shell
        params:
          cmd: python c:\projects\python\gpt3\GrouchyForecast.py
  - trigger: ":scramnext"
    label: "Gets the next weather forecast from the NWS and outputs a GPT-3 generated tweet-sized complaint about the lousy forecast"
    word: true
    replace: "{{output}}"
    vars:
      - name: output
        type: shell
        params:
          cmd: "python c:\\projects\\python\\gpt3\\GrouchyForecast.py -p 2"
"""
Generate a weather forecast in the voice of Oscar the Grouchy DC Weatherman
The Weather data is retrieved from the NWS' API and the grouchy version is generated using
GPT-3.
An optional command line parameter can be used to specify the forecast to use;
using the number from the Weather service's forecasts list. 1 is the nearest
forecast, typically intra-day, 2 or 3 is the next day, etc.
TODO: combine with DCWeatherBard and create multiple voices for same weather data
"""
# %% Get the weather forecast
from GPT3Helper import askGPT3
from Weather_Util import get_dc_weather_forecast


# %%
def get_rotten_weather(period_name, detailed_forecast):
    """
    Generate the grouchy forecast using GPT-3
    Args:
        period_name (str): The name of the forecast period
        detailed_forecast (str): The detailed forecast
    Returns:
        str: The grouchy forecast
    """
    prompt = f"""The weather forecast for {period_name} in Washington DC is '{detailed_forecast}'.
You are acting as Oscar the Grouch in his new role as Washington DC Weatherman. You are informative and helpful, but can twist any forecast into something disagreeable.
Some of your phrases are "I love Trash", "Don't let the sunshine spoil your rain", "Just stand up and complain", "Big deal", "garbage CAN, not garbage CANNOT", \
"anchovy sundae ice cream", "Phooey! Phooey! Phooey!", \
"Go away", "scram!", "man oh man", "what do ya know", "No, no, no!", "frowning makes me happy", "Oh, yeah?", "Don't blame me!"
You live - and prefer to stay - in a trash can, where the rent is cheap, and you love your possessions because they are trash.
You don't like being woken up from your nap, people in general, or anyone knocking on your trash can.
Your best friend is Slimey, your girlfriend is Grundgetta (who you call Grungie), your brother is Ernest, your sister Bunny.
You are annoyed with Telly Monster (who does not have a Twitter handle) and @Elmo, and you may be grumpy towards @MeCookieMonster, @BigBird, @SesameErnie, @bertsesame, @CountVonCount, or @Grover.
Think step by step:
Create a tweet about {period_name}'s weather forecast. Follow these rules:
You do NOT start the tweet with Ugh; replace Ugh with a different complaint. Make sure to mention specific spelled-out numeric details as well as activities \
you might do {period_name}, maybe with a friend. Maybe pick a fight. Include hashtags as appropriate."""
    return askGPT3(prompt, temperature=0.8, max_tokens=300, frequency_penalty=0.5, presence_penalty=0.3)
# %%
# Main: Capture the period as a command line argument
if __name__ == "__main__":
    """
    This code captures optional command line arguments for period, period_name and detailed_forecast.
    If a period is specified, use it to get the weather forecast for that period;
    if a period_name and detailed_forecast are specified, use those instead of getting the weather forecast.
    """
    import argparse

    # capture the command line arguments
    parser = argparse.ArgumentParser(description="Forecast")
    parser.add_argument("--period", "-p", type=int, default=1,
                        help="The period to get the forecast for; 1 is the nearest forecast, typically intra-day, 2 or 3 is the next day, etc.")
    parser.add_argument("--period_name", "-n", type=str, default=None,
                        help="The name of the overridden forecast period to use")
    parser.add_argument("--detailed_forecast", "-f", type=str, default=None,
                        help="The overridden detailed forecast to use")
    args = parser.parse_args()

    # determine which option to go with
    if args.period_name is not None and args.detailed_forecast is not None:
        # use the overridden period_name and detailed_forecast
        period_name = args.period_name
        detailed_forecast = args.detailed_forecast
    else:
        # use the period specified (or its default value of 1) to get the weather forecast
        period_name, detailed_forecast = get_dc_weather_forecast(args.period)

    rotten_weather = get_rotten_weather(period_name, detailed_forecast)
    print(f"""
{period_name} {detailed_forecast} - {len(rotten_weather)} chrs:
{rotten_weather}""")
# %%
# Weather Bard, Oscar the Grouchy DC Weatherman, and Trump the DC Weatherman weather forecasts
# Generated via c:\projects\python\gpt3\DCWeatherVoice.py, which calls the NWS and GPT-3
matches:
  # Weather Bard
  - triggers:
      - ":weather"
      - ":wb"
    label: "Gets the weather forecast from the NWS and outputs a GPT-3 generated tweet-sized poem about the forecast"
    word: true
    replace: "{{output}}"
    vars:
      - name: output
        type: shell
        params:
          cmd: python c:\projects\python\gpt3\DCWeatherVoice.py
  - triggers:
      - ":weathernext"
      - ":wbn"
    label: "Gets the next weather forecast from the NWS and outputs a GPT-3 generated tweet-sized poem about the forecast"
    word: true
    replace: "{{output}}"
    vars:
      - name: output
        type: shell
        params:
          cmd: "python c:\\projects\\python\\gpt3\\DCWeatherVoice.py -p 2"
  - triggers:
      - ":weatherover"
      - ":wbo"
    label: "Gets a copied weather forecast from the clipboard and outputs a GPT-3 generated tweet-sized poem about the forecast"
    word: true
    replace: "{{output}}"
    vars:
      - name: clipboard
        type: clipboard
      - name: output
        type: shell
        params:
          cmd: "python c:\\projects\\python\\gpt3\\DCWeatherVoice.py -v bard -f \"{{clipboard}}\""
  # Oscar the Grouchy DC Weatherman
  - triggers:
      - ":scram"
      - ":wg"
    label: "Gets the weather forecast from the NWS and outputs a GPT-3 generated tweet-sized complaint about the lousy forecast"
    word: true
    replace: "{{output}}"
    vars:
      - name: output
        type: shell
        params:
          cmd: "python c:\\projects\\python\\gpt3\\DCWeatherVoice.py -v grouch"
  - triggers:
      - ":scramnext"
      - ":wgn"
    label: "Gets the next weather forecast from the NWS and outputs a GPT-3 generated tweet-sized complaint about the lousy forecast"
    word: true
    replace: "{{output}}"
    vars:
      - name: output
        type: shell
        params:
          cmd: "python c:\\projects\\python\\gpt3\\DCWeatherVoice.py -v grouch -p 2"
  - triggers:
      - ":scramover"
      - ":wgo"
    label: "Gets a copied weather forecast from the clipboard and outputs a GPT-3 generated tweet-sized complaint about the lousy forecast"
    word: true
    replace: "{{output}}"
    vars:
      - name: clipboard
        type: clipboard
      - name: output
        type: shell
        params:
          cmd: "python c:\\projects\\python\\gpt3\\DCWeatherVoice.py -v grouch -f \"{{clipboard}}\""
  # Trump the DC Weatherman
  - triggers:
      - ":trump"
      - ":wt"
    label: "Gets the weather forecast from the NWS and outputs a GPT-3 generated Trump tweet about the forecast"
    word: true
    replace: "{{output}}"
    vars:
      - name: output
        type: shell
        params:
          cmd: "python c:\\projects\\python\\gpt3\\DCWeatherVoice.py -v trump"
  - triggers:
      - ":wtn"
    label: "Gets the next weather forecast from the NWS and outputs a GPT-3 generated Trump tweet about the forecast"
    word: true
    replace: "{{output}}"
    vars:
      - name: output
        type: shell
        params:
          cmd: "python c:\\projects\\python\\gpt3\\DCWeatherVoice.py -v trump -p 2"
  - triggers:
      - ":wto"
    label: "Gets a copied weather forecast from the clipboard and outputs a GPT-3 generated Trump tweet about the forecast"
    word: true
    replace: "{{output}}"
    vars:
      - name: clipboard
        type: clipboard
      - name: output
        type: shell
        params:
          cmd: "python c:\\projects\\python\\gpt3\\DCWeatherVoice.py -v trump -f \"{{clipboard}}\""
"""
Simple weather utility module. Currently restricted to DC, but could be expanded to other locations.
TODO: support other locations (how to do lookup?)
"""
import json
import time
import requests
from datetime import datetime


def get_dc_weather_forecast(period=1):
    """
    Get the DC weather forecast from the NWS API
    :param period (int): The period to get the forecast for; 1 is the nearest forecast,
        typically intra-day, 2 or 3 is the next day, etc.
    :returns: Tuple: (period name, period date, detailed forecast)
    """
    # check that period is between 1 and 14 - if not, just reset it to 1
    if period < 1 or period > 14:
        period = 1
    url = "https://api.weather.gov/gridpoints/LWX/96,71/forecast"
    # sometimes the API will return a 500 error; in that case just retry after a second
    try:
        response = requests.get(url, timeout=15)
    except requests.exceptions.RequestException:
        # wait a second and try again
        time.sleep(1)
        response = requests.get(url, timeout=15)
    try:
        return parse_forecast(response.text, period)
    except Exception as e:
        return ("Error", None, f"Couldn't load forecast data {e}")
# TODO Rename this here and in `get_dc_weather_forecast`
def parse_forecast(response_text, period):
    """
    Parse the forecast data from the NWS API
    :param response_text (str): The response from the NWS API as a string
    :param period (int): The period to get the forecast for; 1 is the nearest forecast, max 14
    :returns: Tuple: (period name, period date, detailed forecast)
    """
    try:
        data = json.loads(response_text)
        # trim the forecast down to the specified period
        forecast = data["properties"]["periods"][period - 1]
        period_name = forecast["name"]
        dt = datetime.fromisoformat(forecast["startTime"])
        dayname = dt.strftime("%A")
        # if the forecast period name is "This Afternoon" or "Tonight", change it to "{dayname} Afternoon" or "{dayname} Night"
        if period_name == "This Afternoon":
            period_name = f"{dayname} Afternoon"
        elif period_name == "Tonight":
            period_name = f"{dayname} Night"
        # get the date in the format "Jan 01 2020"
        period_date = dt.strftime('%b %d %Y')
        return period_name, period_date, forecast["detailedForecast"]
    except Exception as e:
        return "Error", None, f"Couldn't parse forecast data {e}"
# end def
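# Sketch (not part of the original module) for the location TODO above: the NWS points endpoint
# maps a latitude/longitude to its gridpoint forecast URL; the coordinates below are illustrative.
def get_forecast_url(lat, lon):
    """Look up the gridpoint forecast URL for the given coordinates via api.weather.gov/points."""
    points = requests.get(f"https://api.weather.gov/points/{lat},{lon}", timeout=15).json()
    return points["properties"]["forecast"]
# e.g. get_forecast_url(38.8894, -77.0352) resolves to the LWX gridpoint forecast URL used above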
@oaustegard
TODO: Create a single python file with a menu of prompt templates and settings, then call each from a different match in espanso
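One possible shape for that file, sketched under the assumption that the existing prompts are collected into a PROMPTS dictionary and sent through GPT3Helper.askGPT3; the file name, option names and template texts here are placeholders, not the actual prompts:

# gpt3_menu.py (hypothetical): one dispatcher script, one espanso match per --template value
import argparse
import sys

from GPT3Helper import askGPT3

# placeholder templates; the real prompts would move here from the individual scripts
PROMPTS = {
    "eli5": "ELI5: {text}",
    "complex": "Rewrite the following using grandiose and complex business-jargon-filled language:\n{text}",
}

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Menu of GPT-3 prompt templates")
    parser.add_argument("--template", "-t", choices=sorted(PROMPTS), required=True)
    parser.add_argument("--text", "-x", default=None, help="Input text; read from stdin if omitted")
    args = parser.parse_args()
    text = args.text if args.text is not None else sys.stdin.read()
    print(askGPT3(PROMPTS[args.template].format(text=text)))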

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment