@devforfu
Last active August 14, 2021 19:41
A snippet that sends requests to OpenAI's Codex, itself generated with the help of Codex.
#!/Users/ck/.conda/envs/codex/bin/python
"""
The script wrapping OpenAI client into a simple CLI. Was generated using Codex with minor manual tweaks.
"""
from argparse import ArgumentParser
from dataclasses import dataclass
from pprint import pprint
from typing import Dict, List

import openai


def main():
    args = parse_args()
    params = Params(**vars(args))
    response = openai_completion(params)
    pprint(response)
def parse_args() -> "Params":
"""Parses CLI arguments for preparing Codex API.
Supported arguments:
* prompt (str, required)
* response_length (int, default: 512)
* temperature (float, default: 0.7)
* top_p (float, default: 1)
* frequency_penalty (float, default: 0)
* presence_penalty (float, default: 0)
* best_of (int, default: 1)
* stop_sequences (List[str], default: ["def", "class"])
"""
parser = ArgumentParser(description="Codex API preparation")
parser.add_argument(
"--prompt",
required=True,
help="Completion prompt"
)
parser.add_argument(
"--engine",
default="davinci-codex",
help="Completion engine"
)
parser.add_argument(
"--response_length",
type=int,
default=512,
help="Length of the response (nucleus sampling)",
)
parser.add_argument(
"--temperature", type=float, default=0.7, help="Sampling softmax temperature"
)
parser.add_argument(
"--top_p", type=float, default=1, help="Nucleus sampling p"
)
parser.add_argument(
"--frequency_penalty",
type=float,
default=0,
help="Frequency penalty for generation",
)
parser.add_argument(
"--presence_penalty", type=float, default=0, help="Presence penalty for generation"
)
parser.add_argument(
"--best_of", type=int, default=1, help="Number of generated responses"
)
parser.add_argument(
"--stop_sequences",
nargs="+",
default=["def", "class"],
help="Stop generation on these sequences",
)
return parser.parse_args()


@dataclass
class Params:
    """Parameters parsed by the `parse_args` function."""

    prompt: str
    engine: str
    response_length: int
    temperature: float
    top_p: float
    frequency_penalty: float
    presence_penalty: float
    best_of: int
    stop_sequences: List[str]


def openai_completion(params: Params) -> Dict:
    """Makes a completion request using the openai package.

    Parameters
    ----------
    params
        Request parameters.

    Returns
    -------
    Dict
        API response.
    """
    return openai.Completion.create(
        engine=params.engine,
        prompt=params.prompt,
        max_tokens=params.response_length,
        temperature=params.temperature,
        top_p=params.top_p,
        frequency_penalty=params.frequency_penalty,
        presence_penalty=params.presence_penalty,
        stop=params.stop_sequences,
        n=params.best_of,  # number of completions to generate
        stream=None,       # no streaming
        logprobs=None,     # no token log-probabilities requested
    )
if __name__ == "__main__":
main()
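
For reference, below is a minimal sketch of issuing the same request programmatically instead of through the CLI. It assumes the definitions above are importable (the module name codex_cli is hypothetical) and that the openai package picks up the API key from the OPENAI_API_KEY environment variable when openai.api_key is not set explicitly.

# Hypothetical usage sketch; `codex_cli` is an assumed module name for the script above.
from codex_cli import Params, openai_completion

params = Params(
    prompt="def fibonacci(n):",
    engine="davinci-codex",
    response_length=128,
    temperature=0.2,
    top_p=1.0,
    frequency_penalty=0.0,
    presence_penalty=0.0,
    best_of=1,
    stop_sequences=["def", "class"],
)

# The legacy openai package reads OPENAI_API_KEY from the environment
# if `openai.api_key` is not assigned explicitly.
response = openai_completion(params)
print(response["choices"][0]["text"])

The equivalent CLI call passes the same values through the flags defined in parse_args, for example --prompt, --temperature, and --response_length.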