Skip to content

Instantly share code, notes, and snippets.

@waseemhnyc
Created April 24, 2024 00:40
Show Gist options
  • Save waseemhnyc/259945a6191e757fd52cade57808d4de to your computer and use it in GitHub Desktop.
# set your OPENAI_API_KEY in your .env
from openai.types.chat import ChatCompletionMessageParam
from mirascope.openai import OpenAICall
from dotenv import load_dotenv
load_dotenv()
class Librarian(OpenAICall):
    """An OpenAI-backed chat call that answers as a world-class librarian.

    The prompt template injects prior conversation turns through the
    MESSAGES placeholder (`history`) and the current question through
    the USER placeholder (`question`).
    """

    # Kept at column 0 inside the literal so the template text is unchanged.
    prompt_template = """
SYSTEM: You are the world's greatest librarian.
MESSAGES: {history}
USER: {question}
"""

    # Current user question for the next call.
    question: str
    # Prior turns in OpenAI message format. A mutable [] default on a class
    # attribute is safe here only if the base model copies defaults per
    # instance (pydantic does) — TODO confirm for this mirascope version.
    history: list[ChatCompletionMessageParam] = []
librarian = Librarian(question="", history=[])

# REPL-style chat loop: read a question, call the model, then record both
# turns in history so the next call sees the full conversation. Runs until
# the process is interrupted (e.g. Ctrl-C).
while True:
    librarian.question = input("(User): ")
    response = librarian.call()
    librarian.history += [
        {"role": "user", "content": librarian.question},
        {"role": "assistant", "content": response.content},
    ]
    print(f"(Assistant): {response.content}")
# connecting ollama/llama3
from openai.types.chat import ChatCompletionMessageParam
from mirascope.openai import OpenAICall, OpenAICallParams
class Librarian(OpenAICall):
    """Librarian chat call pointed at a local Ollama server.

    Identical prompt to the OpenAI variant, but routed through Ollama's
    OpenAI-compatible endpoint and the `llama3` model.
    """

    # Kept at column 0 inside the literal so the template text is unchanged.
    prompt_template = """
SYSTEM: You are the world's greatest librarian.
MESSAGES: {history}
USER: {question}
"""

    # Current user question for the next call.
    question: str
    # Prior turns in OpenAI message format; mutable [] default assumed to be
    # copied per instance by the underlying model — TODO confirm.
    history: list[ChatCompletionMessageParam] = []

    # Ollama does not check API keys, but the OpenAI client requires a
    # non-empty one, hence the placeholder value.
    api_key = "ollama"
    base_url = "http://localhost:11434/v1/"
    call_params = OpenAICallParams(model="llama3")
librarian = Librarian(question="", history=[])

# Same REPL loop as the OpenAI variant: prompt the user, call the local
# llama3 model, append both turns to history, and print the reply.
# Runs until the process is interrupted (e.g. Ctrl-C).
while True:
    librarian.question = input("(User): ")
    response = librarian.call()
    librarian.history += [
        {"role": "user", "content": librarian.question},
        {"role": "assistant", "content": response.content},
    ]
    print(f"(Assistant): {response.content}")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment