Skip to content

Instantly share code, notes, and snippets.

@buanzo
Created September 30, 2023 13:59
Show Gist options
  • Save buanzo/47c56e18e42d8ba5cf0b972db56c9c72 to your computer and use it in GitHub Desktop.
Save buanzo/47c56e18e42d8ba5cf0b972db56c9c72 to your computer and use it in GitHub Desktop.
buanzo's dyson.py — OpenAI-LLM-aided meta-functions
import os
import openai
import inspect
from typing import Callable
import json
class Dyson:
    """Wrap a function so that calling it asks an OpenAI chat model instead.

    The wrapped function's docstring is the task description sent to the
    model, so a docstring is mandatory.  Intended for use as a decorator:
    the decorated function's body is never executed.
    """

    def __init__(self, func: Callable, model='gpt-4'):
        """Store the wrapped function, target model name, and metadata.

        Raises:
            ValueError: if *func* has no docstring (via _get_metadata).
        """
        self.func = func
        self.model = model
        self.metadata = self._get_metadata()

    def _get_metadata(self):
        """Return {'name', 'args', 'docstring'} for the wrapped function."""
        metadata = {}
        metadata['name'] = self.func.__name__
        metadata['args'] = inspect.getfullargspec(self.func).args
        docstring = inspect.getdoc(self.func)
        if not docstring:
            # The docstring is the model's only specification of the task,
            # so refusing to proceed without one is deliberate.
            raise ValueError("Function must have a docstring.")
        metadata['docstring'] = docstring
        return metadata

    def _create_role(self):
        """Build the system prompt from the wrapped function's docstring."""
        role = f"""
You are an AI that calculates the result of this: {self.metadata['docstring']}.
User prompt indicates function arguments.
If user prompts 'EXECUTE', then no arguments are passed to the function."""
        return role

    def _build_prompt(self, args, kwargs):
        """Render call arguments into the user prompt string.

        Returns 'EXECUTE' when the wrapped function declares no parameters.
        Bug fix: keyword arguments were previously accepted by __call__ but
        silently dropped; they are now appended to the prompt.
        """
        if len(self.metadata['args']) == 0:
            return 'EXECUTE'
        prompt = f"{args if args else self.metadata['args']}"
        if kwargs:
            prompt = f"{prompt}, {kwargs}"
        return prompt

    def __call__(self, *args, **kwargs):
        """Invoke the model with the rendered arguments; return its raw reply."""
        role = self._create_role()
        prompt = self._build_prompt(args, kwargs)
        return self.run_gpt_by_role(role, prompt)

    def run_gpt_by_role(self, role, prompt):
        """Send one system+user exchange to the chat API; return the reply text.

        NOTE(review): this uses the legacy (openai<1.0) ChatCompletion API,
        removed in openai>=1.0 — pin openai<1.0 or migrate to
        client.chat.completions.create.
        """
        response = openai.ChatCompletion.create(
            model=self.model,
            messages=[{"role": "system", "content": role},
                      {"role": "user", "content": prompt}])
        return response["choices"][0]["message"]["content"]
# Dyson replaces this stub at definition time: the function body never runs.
# The docstring below is sent to the model as the task specification, so its
# exact wording is runtime behavior, not documentation — do not edit casually.
@Dyson
def say_hello():
    """
    Generate a friendly greeting return as a python dictionary in english, spanish and klingon.
    no explanation. no codeblocks. just json.
    """
    pass
# Let's test it — smoke test: performs a live OpenAI API call, so it needs
# network access and the openai library's credentials configured.
a = say_hello()       # raw model reply (presumably a JSON string — the
                      # docstring asks for "just json"; TODO confirm)
print(a)
print(type(a))
print(json.loads(a))  # will raise json.JSONDecodeError if the model
                      # wrapped the reply in prose or code fences
@buanzo
Copy link
Author

buanzo commented Oct 22, 2023

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment