Skip to content

Instantly share code, notes, and snippets.

@defreez
Created November 8, 2023 20:40
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save defreez/40e20d29e28e0c4484bda1e53c455687 to your computer and use it in GitHub Desktop.
Simple CLI chat demo
from openai import OpenAI
from dotenv import load_dotenv
import os

# Load environment variables from a .env file into os.environ so the OpenAI
# API key never has to be hard-coded in source.
load_dotenv()

# Read the key explicitly; os.getenv returns None if the variable is unset.
api_key = os.getenv('OPENAI_API_KEY')

# Pass the key explicitly instead of relying on the OpenAI client's implicit
# environment lookup — the original loaded `api_key` but never used it.
client = OpenAI(api_key=api_key)
def start_chat():
    """Return a new conversation history seeded with the system prompt."""
    return [{"role": "system", "content": "You are a helpful assistant."}]
def chat_with_model(messages):
    """Request a chat completion for *messages* and return the streaming
    response iterator (stream=True yields incremental deltas)."""
    return client.chat.completions.create(
        model="gpt-4-1106-preview",
        messages=messages,
        stream=True,
    )
def main():
    """Run the interactive CLI chat loop; type 'quit' to exit."""
    print("Welcome to the CLI Chat! Type 'quit' to exit.")
    chat_history = start_chat()
    while True:
        user_input = input("You: ")
        if user_input.lower() == 'quit':
            break
        # Record the user's turn before requesting a completion.
        chat_history.append({"role": "user", "content": user_input})
        # Streaming response: iterate the deltas, echoing each chunk as it
        # arrives and collecting the pieces for the conversation history.
        response_iter = chat_with_model(chat_history)
        chunks = []
        # flush=True so partial output appears immediately despite end="".
        print("AI: ", end="", flush=True)
        for response_delta in response_iter:
            # delta.content is None on role-only / final chunks.
            ai_text = response_delta.choices[0].delta.content or ""
            chunks.append(ai_text)
            print(ai_text, end="", flush=True)
        # Record the assistant's full reply (joined once — avoids the
        # quadratic string += accumulation of the original).
        chat_history.append({"role": "assistant", "content": "".join(chunks)})
        print()


if __name__ == "__main__":
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment