@avelican
Created May 1, 2023 10:26
Parallelize GPT API requests with asyncio and aiohttp
# import openai
import os
import asyncio
import aiohttp
import random
OPENAI_API_KEY = os.environ['OPENAI_API_KEY']

async def fetch_completion(prompt, session, max_tokens=2048):
    """Send a single prompt to the legacy completions endpoint."""
    async with session.post(
        # "https://api.openai.com/v1/engines/davinci-codex/completions",
        "https://api.openai.com/v1/completions",
        headers={"Authorization": f"Bearer {OPENAI_API_KEY}"},
        json={
            "model": "text-davinci-003",
            "prompt": prompt,
            "max_tokens": max_tokens,
            "temperature": 0.5,
            # "n": 1,  # optional, default n=1
        },
    ) as response:
        return await response.json()  # result["choices"][0]["text"] ?  # untested

async def fetch_chat_completion(prompt, session, max_tokens=2048):
    """Send a single prompt to the chat completions endpoint and return the reply text."""
    async with session.post(
        "https://api.openai.com/v1/chat/completions",
        headers={
            "Authorization": f"Bearer {OPENAI_API_KEY}",
            "Content-Type": "application/json",
        },
        json={
            "model": "gpt-3.5-turbo",
            # "prompt": prompt,
            "messages": [{"role": "user", "content": prompt}],
            "max_tokens": max_tokens,
            "temperature": 0.5,
            # "n": 1,  # optional, default n=1
        },
    ) as response:
        reply = await response.json()
        if 'choices' in reply:
            return reply['choices'][0]['message']['content']
        # On an error response (e.g. rate limit), return the raw JSON for inspection.
        return reply

def random_prompt():
    a = random.randint(10, 100)
    b = random.randint(10, 100)
    return f"What is {a} + {b} ?"

async def main():
    async with aiohttp.ClientSession() as session:
        prompts = [random_prompt() for _ in range(100)]
        # Fire all requests concurrently and wait for every response.
        tasks = [fetch_chat_completion(prompt, session, max_tokens=10) for prompt in prompts]
        results = await asyncio.gather(*tasks)
        for result in results:
            print(result)

if __name__ == "__main__":
    asyncio.run(main())
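
Note: firing 100 requests at once can run into OpenAI rate limits. Below is a minimal sketch (not part of the original gist) of one way to cap concurrency with asyncio.Semaphore; the limit of 10, the main_limited name, and the limited_fetch wrapper are illustrative.

async def main_limited():
    sem = asyncio.Semaphore(10)  # illustrative cap on requests in flight at once

    async def limited_fetch(prompt, session, max_tokens=2048):
        async with sem:  # wait for a free slot before sending the request
            return await fetch_chat_completion(prompt, session, max_tokens=max_tokens)

    async with aiohttp.ClientSession() as session:
        prompts = [random_prompt() for _ in range(100)]
        tasks = [limited_fetch(prompt, session, max_tokens=10) for prompt in prompts]
        results = await asyncio.gather(*tasks)
        for result in results:
            print(result)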