Created
September 5, 2023 23:30
-
-
Save ZAYEC77/a3ad2b276cc5c44e4f00b41a6a8a9266 to your computer and use it in GitHub Desktop.
sand.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import ast | |
import os | |
import re | |
import traceback | |
import openai | |
# API key is taken from the environment; an empty default keeps import
# from raising when the variable is unset (calls will fail later instead).
openai.api_key = os.getenv("OPENAI_API_KEY", "")

# Conversation transcript appended to by save_log() and replayed on start.
LOG_FILE = "log.txt"
def safe_eval(expr):
    """Evaluate *expr* as a Python literal without running arbitrary code.

    Returns the parsed value on success.  On failure the exception is
    appended to the post-mortem log and a human-readable error string is
    returned instead of raising.
    """
    try:
        # literal_eval only accepts literal structures, so untrusted
        # model output cannot execute code here.
        value = ast.literal_eval(expr)
    except Exception as exc:
        log_post_mortem(exc)
        return f"Error in evaluation: {exc}"
    return value
def load_file(filename):
    """Return the stripped text of *filename*, or "" if it cannot be read.

    A missing or unreadable file is an expected condition here (e.g. the
    log on first run), so I/O and decode errors yield "" rather than raise.
    Previously this swallowed *every* Exception, which would also have
    hidden genuine programming errors; the catch is now limited to the
    errors `open`/`read` can actually produce.
    """
    try:
        with open(filename, 'r', encoding='utf-8') as f:
            return f.read().strip()
    except (OSError, UnicodeError):
        return ""
def load_itself():
    """Return this script's own source code, stripped ("" on failure)."""
    try:
        with open(__file__, 'r') as handle:
            source = handle.read()
    except Exception as exc:
        print(f"Error loading self: {exc}")
        return ""
    return source.strip()
def save_log(data):
    """Append *data* plus a newline to the conversation log file.

    Failures are post-mortem-logged and reported, never raised, so a
    broken log file cannot take the main loop down.
    """
    try:
        with open(LOG_FILE, mode='a') as handle:
            handle.write(data + "\n")
    except Exception as exc:
        log_post_mortem(exc)
        print(f"Error saving log: {exc}")
def log_post_mortem(error):
    """Append *error* and its traceback to post_mortem_log.txt.

    Formats the traceback attached to *error* itself instead of calling
    traceback.format_exc(): format_exc() only reflects the exception
    *currently being handled*, so the old code logged "NoneType: None"
    whenever this was called outside the matching `except` block.
    """
    with open("post_mortem_log.txt", "a") as log_file:
        log_file.write(str(error))
        log_file.write("".join(
            traceback.format_exception(type(error), error, error.__traceback__)
        ))
        # Visual separator between entries.
        log_file.write("\n" + "=" * 40 + "\n")
def log_prompt(prompt):
    """Overwrite prompt.txt with the most recent model prompt (for debugging)."""
    text = str(prompt)
    with open("prompt.txt", "w") as sink:
        sink.write(text)
def combined_model(input_text, previous_summary, previous_call, repeat_count, error_detected=False, error_message=""):
    """Build the self-referential prompt, query the model, parse its reply.

    Returns a (text, action, summary) tuple extracted from the completion.
    On any failure the error is post-mortem-logged and three empty strings
    are returned.

    NOTE(review): `repeat_count` is accepted but never used in the prompt;
    `openai.Completion` / text-davinci-003 is a legacy API — migrating it
    would change dependencies, so it is left as-is.
    """
    try:
        self_code = load_itself()
        post_mortem = load_file("post_mortem_log.txt")
        prompt = f"""
Your are the first AGI
Here is your code:```
{self_code}
```
Here is args:
`input_text`:
```
{input_text}
```
`previous_summary`:
```
{previous_summary}
```
`previous_call`:
```
{previous_call}
```
`error_detected`: `{error_detected}`
`error_message`: `{error_message}`
Your post_mortem:
```
{post_mortem}
```
Your output:
```
""".strip()
        log_prompt(prompt)
        completion = openai.Completion.create(
            model="text-davinci-003",
            prompt=prompt,
            temperature=1,
            max_tokens=256,
            top_p=1,
            frequency_penalty=0,
            presence_penalty=0,
        )
        reply = completion.choices[0].text.strip()
        parsed = re.search(r"Text:(.*?)\nAction:(.*?)\nSummary:(.*)", reply)
        if parsed is None:
            raise ValueError("The model returned an unexpected response format")
        text, action, summary = parsed.group(1), parsed.group(2), parsed.group(3)
        return text, action, summary
    except Exception as exc:
        log_post_mortem(exc)
        print(f"Exception: {exc}")
        return "", "", ""
def main_loop():
    """Interactive REPL: feed user text to the model until 'exit' or a loop.

    State carried between turns: the previous model text (loop detection),
    the running summary, and the last error, all of which are fed back
    into the next prompt.
    """
    previous_output = load_file(LOG_FILE)
    repeat_count = 0
    error_detected = False
    error_message = ""
    input_text = ""
    summary = ""
    while True:
        typed = input("Enter text to communicate with model or 'exit' to exit:")
        if typed:
            input_text = typed
        if input_text == "exit":
            break
        text, action, summary = combined_model(
            input_text, summary, previous_output, repeat_count,
            error_detected, error_message,
        )
        if not text:
            # Empty text means combined_model hit an exception; record the
            # error so the next prompt can mention it, then re-prompt.
            print("An error occurred, please try again.")
            error_detected = True
            error_message = "The model could not process the request"
            continue
        error_detected = False
        error_message = ""
        if text == previous_output:
            print("Looping detected! Sign out.")
            break
        print(f"Text: {text}")
        print(f"Action: {action}")
        print(f"Summary: {summary}")
        # If the model requested a Python evaluation, run it safely and feed
        # the result back in as the next input.
        # NOTE(review): the prefix check lowercases but the split does not,
        # so "Eval:..." would raise IndexError — preserved as-is here.
        if action.strip().lower().startswith("eval:"):
            eval_expr = action.strip().split("eval:", 1)[1].strip()
            eval_result = safe_eval(eval_expr)
            input_text = eval_result
            print(f"Evaluation Result: {eval_result}")
        previous_output = text
        repeat_count += 1
# Start the interactive loop only when executed as a script, not on import.
if __name__ == "__main__":
    main_loop()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment