Last active: February 19, 2024
A simple script to get results from the OpenAI API asynchronously
# NOTE:
# You can find an updated, more robust and feature-rich implementation
# in Zeno Build
# - Zeno Build: https://github.com/zeno-ml/zeno-build/
# - Implementation: https://github.com/zeno-ml/zeno-build/blob/main/zeno_build/models/providers/openai_utils.py

import openai
import asyncio
from typing import Any


async def dispatch_openai_requests(
    messages_list: list[list[dict[str,Any]]],
    model: str,
    temperature: float,
    max_tokens: int,
    top_p: float,
) -> list[str]:
    """Dispatches requests to OpenAI API asynchronously.

    Args:
        messages_list: List of messages to be sent to OpenAI ChatCompletion API.
        model: OpenAI model to use.
        temperature: Temperature to use for the model.
        max_tokens: Maximum number of tokens to generate.
        top_p: Top p to use for the model.

    Returns:
        List of responses from OpenAI API.
    """
    async_responses = [
        openai.ChatCompletion.acreate(
            model=model,
            messages=x,
            temperature=temperature,
            max_tokens=max_tokens,
            top_p=top_p,
        )
        for x in messages_list
    ]
    return await asyncio.gather(*async_responses)


predictions = asyncio.run(
    dispatch_openai_requests(
        messages_list=[
            [{"role": "user", "content": "Write a poem about asynchronous execution."}],
            [{"role": "user", "content": "Write a poem about asynchronous pirates."}],
        ],
        model="gpt-3.5-turbo",
        temperature=0.3,
        max_tokens=200,
        top_p=1.0,
    )
)

for i, x in enumerate(predictions):
    print(f"Response {i}: {x['choices'][0]['message']['content']}\n\n")
Thank you, this is a much more compact example than the openai-cookbook ones on this topic.
This does not work for me; it fails with:

messages_list: list[list[dict[str,Any]]],
TypeError: 'type' object is not subscriptable

It only works when I use:

async def dispatch_openai_requests(messages_list, model, temperature, max_tokens, top_p):

instead of the fully type-annotated parameter list. So @neubig, thanks for the code.
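That `TypeError` comes from running the script on Python 3.8 or earlier, where built-in generics such as `list[...]` and `dict[...]` cannot be subscripted at runtime. If you want to keep the type hints on an older Python, one sketch is to switch to the equivalent `typing` aliases while leaving the rest of the gist unchanged:

from typing import Any, Dict, List

async def dispatch_openai_requests(
    messages_list: List[List[Dict[str, Any]]],  # typing.List/Dict work on Python 3.7+
    model: str,
    temperature: float,
    max_tokens: int,
    top_p: float,
) -> List[str]:  # mirrors the original annotations, converted to typing aliases
    ...  # same body as in the gist above

Alternatively, adding `from __future__ import annotations` at the top of the original script defers evaluation of the annotations (PEP 563), which also avoids the error on Python 3.7 and 3.8 without changing them.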
This script can be used under the MIT license.