Last active
May 15, 2023 14:19
-
-
Save ajalexander/89385ce0e927265c1395fc9c8c1861c1 to your computer and use it in GitHub Desktop.
Example code for OpenAI API usage
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import os

import openai

# Configuration for the legacy (0.27-style) OpenAI SDK.
# The API key is pulled from the environment rather than hard-coded.
openai.api_key = os.getenv('OPENAI_API_KEY')

# Model identifiers shared by the example functions below.
completion_model = 'text-davinci-003'
chat_model = 'gpt-3.5-turbo'
def text_completion_single():
    """Send one prompt to the Completion endpoint and print the first answer."""
    response = openai.Completion.create(
        model=completion_model,
        prompt='What are the three primary colors?',
    )
    print(response.choices[0].text)
def text_completion_multiple():
    """Request several alternative completions (n=4) and print each one."""
    response = openai.Completion.create(
        model=completion_model,
        prompt='How do you get from Indianapolis to Chicago?',
        n=4,
        max_tokens=1024,
    )
    for alternative in response.choices:
        print(alternative.text)
def text_completion_temperature():
    """Show how sampling temperature changes output for the same prompt."""
    prompt = 'Tell a (very) short fictional story about foxes in the wild'
    # Low -> deterministic, high -> increasingly random output.
    for temp in (0, 0.8, 1.5):
        response = openai.Completion.create(
            model=completion_model,
            prompt=prompt,
            temperature=temp,
            max_tokens=256,
        )
        print(f'Temp {temp}: {response.choices[0].text}')
def text_completation_chat_no_history():
    """Interactive loop where each prompt is sent in isolation (no memory).

    Reads prompts from stdin; an empty line ends the loop.
    """
    while True:
        print('Prompt: ')
        user_text = input()
        if not user_text:
            break
        response = openai.Completion.create(
            model=completion_model,
            prompt=user_text,
            max_tokens=250,
        )
        answer = response.choices[0].text
        print(f'Answer: {answer}')
def text_completation_chat_with_history():
    """Interactive loop that replays prior turns inside each new prompt.

    The legacy Completion endpoint has no native message structure, so the
    conversation transcript is stitched into the prompt text by hand. Reads
    prompts from stdin; an empty line ends the loop.
    """
    transcript = []
    while True:
        print('Prompt: ')
        user_text = input()
        if user_text == '':
            break
        formatted_history = '\n'.join(transcript)
        formatted_prompt = f'Given previous conversation history of:\n{formatted_history}\n\n{user_text}'
        response = openai.Completion.create(
            model=completion_model,
            prompt=formatted_prompt,
            max_tokens=250,
        )
        answer = response.choices[0].text
        # Record both sides of the exchange for the next turn's context.
        transcript.append(f'User: "{user_text}"')
        transcript.append(f'Agent: "{answer}"')
        print(f'Answer: {answer}')
def chat_completion_with_history():
    """Interactive loop using the chat endpoint with structured history.

    User prompts and assistant replies are both appended to the running
    message list, so the model receives the full conversational context on
    every call. Reads prompts from stdin; an empty line ends the loop.
    """
    messages = []
    while True:
        print('Prompt: ')
        user_text = input()
        if user_text == '':
            break
        messages.append({'role': 'user', 'content': user_text})
        chat = openai.ChatCompletion.create(model=chat_model, messages=messages)
        reply = chat.choices[0].message
        # The reply message object is appended as-is for the next turn.
        messages.append(reply)
        print(f'Answer: {reply.content}')
# text_completion_single() | |
# text_completion_multiple() | |
# text_completion_temperature() | |
# text_completation_chat_no_history() | |
# text_completation_chat_with_history() | |
# chat_completion_with_history() |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
openai==0.27.2 |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment