Skip to content

Instantly share code, notes, and snippets.

@mme
Last active January 30, 2024 17:16
Show Gist options
  • Save mme/1ecb1f96f1a9b0f7e7371496241a0b4e to your computer and use it in GitHub Desktop.
Save mme/1ecb1f96f1a9b0f7e7371496241a0b4e to your computer and use it in GitHub Desktop.
# Adapted from: https://github.com/anshumankmr/Streaming-ChatGPT-Responses-in-ReactJS/blob/main/server/app.py
# See also: https://stackoverflow.com/questions/76311866/unable-to-send-openai-stream-response-via-flask-api
# A simple Flask app with one route that streams a response from OpenAI's chat completions API (model configured below)
from flask import Flask, Response, stream_with_context, request
import os
import openai
import requests
from flask_cors import CORS
from dotenv import load_dotenv
import json
# Load environment variables from a .env file before anything reads them.
# load_dotenv was imported above but never called, so the API key in a
# .env file would otherwise never be visible to os.getenv().
load_dotenv()

app = Flask(__name__)
# Allow cross-origin requests so a separate front-end (e.g. a React dev
# server) can call the streaming endpoint.
CORS(app)
def chat_gpt_helper(prompt):
    """Stream an OpenAI chat completion for *prompt* as server-sent events.

    Yields one ``data: <json chunk>\n\n`` line per streamed API chunk,
    followed by a final ``data: [DONE]\n\n`` sentinel.

    On failure, yields a single SSE event carrying the error message and
    then the ``[DONE]`` sentinel. (The original ``return str(e)`` inside
    this generator was a bug: a generator's return value is discarded, so
    the client would never see the error and the stream would just end.)
    """
    try:
        # NOTE(review): env var is 'OPEN_API_KEY' here; the conventional
        # name is 'OPENAI_API_KEY' — confirm which one the deployment sets.
        openai.api_key = os.getenv('OPEN_API_KEY')
        for chunk in openai.ChatCompletion.create(
            model="gpt-4-1106-preview",
            messages=[{
                "role": "user",
                "content": prompt
            }],
            stream=True,
        ):
            yield 'data: %s\n\n' % json.dumps(chunk)
        yield 'data: [DONE]\n\n'
    except Exception as e:
        print(e)
        # Surface the failure to the SSE client as a regular event instead
        # of returning it (generator return values are invisible to callers).
        yield 'data: %s\n\n' % json.dumps({"error": str(e)})
        yield 'data: [DONE]\n\n'
@app.route('/create-completions/gpt3', methods=['POST'])
def stream_chat_gpt():
    """Stream the model's reply to the POSTed prompt as server-sent events."""
    body = request.get_json(force = True)
    prompt = body.get('prompt', '')
    # stream_with_context keeps the request context alive while the
    # generator is being consumed by the client.
    sse_stream = stream_with_context(chat_gpt_helper(prompt))
    return Response(sse_stream, mimetype='text/event-stream')
# Dev-server entry point.
# NOTE(review): debug=True enables the interactive Werkzeug debugger —
# confirm this is disabled in any non-local deployment.
# threaded=True lets multiple streaming responses be served concurrently.
if __name__ == '__main__':
    app.run(host="0.0.0.0", debug=True, threaded = True)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment