Skip to content

Instantly share code, notes, and snippets.

@sincethestudy
Last active October 1, 2023 05:29
Show Gist options
  • Save sincethestudy/673c2a29431397bd4c72d1189d483891 to your computer and use it in GitHub Desktop.
import asyncio
import functools
import hashlib
import os
import pickle
class Cache:
    """Persistent key/value cache backed by a single pickle file.

    NOTE(review): ``pickle.load`` on an attacker-controlled file can execute
    arbitrary code — only point this at trusted cache files.
    """

    def __init__(self, cache_file):
        """Load the cache from *cache_file*, starting empty if the file is
        absent, truncated, or corrupt."""
        self.cache_file = cache_file
        # os.makedirs('') raises FileNotFoundError, so only create the
        # parent directory when the path actually has one (fixes a crash
        # for bare relative filenames like Cache('api_cache.pkl')).
        parent = os.path.dirname(cache_file)
        if parent:
            os.makedirs(parent, exist_ok=True)
        self.cache = {}
        if os.path.exists(cache_file):
            try:
                with open(cache_file, 'rb') as f:
                    self.cache = pickle.load(f)
            # EOFError covers a truncated file; UnpicklingError covers a
            # corrupt one (the original crashed on the latter).
            except (EOFError, pickle.UnpicklingError):
                self.cache = {}

    def get(self, key):
        """Return the cached value for *key*, or None if missing."""
        return self.cache.get(key)

    def set(self, key, value):
        """Store *key* -> *value* and persist the whole cache to disk.

        Rewrites the entire pickle on every call — acceptable for the
        small API-response caches this is built for.
        """
        self.cache[key] = value
        with open(self.cache_file, 'wb') as f:
            pickle.dump(self.cache, f)
# Module-level cache shared by all decorated functions, persisted as
# 'api_cache.pkl' next to this module on disk.
cache = Cache(os.path.join(os.path.dirname(__file__), 'api_cache.pkl'))
def cache_calls(function):
    """Decorator adding opt-in disk caching via a ``use_cache`` keyword.

    The wrapped callable gains a ``use_cache`` keyword (default False).
    When True, the result is looked up in / written to the module-level
    ``cache``, keyed by a SHA-256 of ``(name, args, kwargs)``.  Handles
    both sync and async functions.

    NOTE(review): a cached result of ``None`` is indistinguishable from a
    miss and will be recomputed — behavior preserved from the original.
    """
    def _key(args, kwargs):
        # str() of the call tuple is order-sensitive for kwargs, which is
        # fine since keyword argument order is preserved per call site.
        payload = str((function.__name__, args, kwargs)).encode()
        return hashlib.sha256(payload).hexdigest()

    if asyncio.iscoroutinefunction(function):
        # functools.wraps preserves __name__/__doc__; without it, stacked
        # decorators would compute keys from 'wrapper' instead of the
        # real function name.
        @functools.wraps(function)
        async def wrapper(*args, use_cache=False, **kwargs):
            if not use_cache:
                return await function(*args, **kwargs)
            key = _key(args, kwargs)
            result = cache.get(key)
            if result is None:
                result = await function(*args, **kwargs)
                cache.set(key, result)
            return result
    else:
        @functools.wraps(function)
        def wrapper(*args, use_cache=False, **kwargs):
            if not use_cache:
                return function(*args, **kwargs)
            key = _key(args, kwargs)
            result = cache.get(key)
            if result is None:
                result = function(*args, **kwargs)
                cache.set(key, result)
            return result
    return wrapper
# Example usage for the cache_calls decorator.
@cache_calls
def complete(system_prompt=None, user_prompt=None, messages=None, model='gpt-3.5-turbo-16k', temperature=0, cache_calls=False, **kwargs):
    """Call the OpenAI chat-completion API and return the reply content.

    Builds the ``messages`` list from ``user_prompt``/``system_prompt``
    when one is not supplied, then forwards everything to
    ``openai.ChatCompletion.create``.

    Returns a single content string when ``n`` (in kwargs) is 1 or
    absent, otherwise a list of content strings, one per choice.

    NOTE(review): ``openai`` is never imported in this file — confirm the
    import exists elsewhere or add it.
    NOTE(review): the ``cache_calls`` parameter is never read; the
    decorator's opt-in keyword is ``use_cache=True``, so this parameter
    looks vestigial — verify before removing.
    """
    # If messages are not provided, create a new list with the user_prompt
    if messages is None:
        messages = [{'role': 'user', 'content': user_prompt}]
    # If a system_prompt is provided, add it to the start of the messages
    if system_prompt is not None:
        messages = [{'role': 'system', 'content': system_prompt}] + messages
    response = openai.ChatCompletion.create(
        messages=messages,
        model=model,
        temperature=temperature,
        **kwargs
    )
    # n == 1 (the default) yields one string; n > 1 yields a list.
    if kwargs.get('n', 1) == 1:
        result = response.choices[0].message.content
    else:
        result = [choice.message.content for choice in response.choices]
    return result
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment