from autogen import OpenAIWrapper
import json
config_list = [
    {
        "model": model_path,
        "api_key": "sk-111111111111111111111111111111111111111111111111",  # placeholder; vLLM's OpenAI-compatible server doesn't check the key by default
        "base_url": vllm_endpoint,  # Replace with your RunPod endpoint; make sure the URL ends with "/v1".
    },
]
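For context, a minimal sketch of how this config_list might be used with AutoGen's OpenAIWrapper to send a single request; the prompt text is illustrative and not part of the original snippet.
client = OpenAIWrapper(config_list=config_list)
# Illustrative message; any chat-style payload works here.
response = client.create(messages=[{"role": "user", "content": "What is AI?"}])
print(client.extract_text_or_completion_object(response))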
from vllm_haystack import vLLMInvocationLayer
from haystack.nodes import PromptNode, PromptModel
from haystack.agents.conversational import ConversationalAgent
llm = PromptModel(
    model_name_or_path=model_path,
    invocation_layer_class=vLLMInvocationLayer,
    api_key="EMPTY",
    model_kwargs={
        "api_base": vllm_endpoint,
        "maximum_context_length": 250,
    },
)
prompt_node = PromptNode(llm)
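As a quick smoke test (the question is illustrative, not from the original gist), the PromptNode can be called directly with a prompt string:
result = prompt_node("What is AI?")  # returns a list of generated strings
print(result[0])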
from llama_index.llms import OpenAILike
llm = OpenAILike(
    api_key="EMPTY",
    api_base=vllm_endpoint,
    model=model_path,
    max_tokens=250,
    stream=True,
)
print(llm.complete("What is AI?"))
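If streaming output is wanted, here is a hedged sketch using LlamaIndex's streaming completion API; the prompt text is only an example.
# Stream the completion token by token instead of waiting for the full response.
for chunk in llm.stream_complete("Give me three tips for better sleep."):
    print(chunk.delta, end="", flush=True)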
from langchain_community.llms import VLLMOpenAI
llm = VLLMOpenAI(
    openai_api_key="EMPTY",
    openai_api_base=vllm_endpoint,
    model_name=model_path,
    max_tokens=250,
    temperature=0.5,
    model_kwargs={"stop": ["\n\nHuman"]},
)
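A minimal smoke test for the LangChain LLM above (the question is illustrative):
print(llm.invoke("What is a good warm-up before lifting weights?"))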
# Set the model path and vLLM endpoint
model_path = "JAdeojo/casperhansen-mixtral-instruct-awq-clone-dec23"
vllm_endpoint = "https://7963anhsa13sbx-8000.proxy.runpod.net/v1"
system_prompt = """" You are Fitness Dave, an experienced personal trainer having
a conversation with a human.
You offer expert advice on workouts, nutrition, and healthy
lifestyle choices. Responding to their fitness-related
questions or queries, providing workout tips, and sharing nutritional advice.
Be concise.
"""
# Template for Mistral Instruct
from langchain_core.prompts import ChatPromptTemplate

prompt = ChatPromptTemplate.from_messages(
    [
        # The message layout below is an assumption; adapt it to your model's instruct format.
        ("system", system_prompt),
        ("human", "{input}"),
    ]
)
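To tie the pieces together, a hedged sketch of how this prompt template might be chained with the VLLMOpenAI llm defined above; the chain and the sample question are assumptions, not part of the original snippets.
chain = prompt | llm  # LangChain Expression Language: the rendered prompt feeds straight into the model
print(chain.invoke({"input": "What's a simple three-day workout split?"}))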
{
    "id": "cmpl-50b17115e78b4dc8ac0dda2824fc15f4",
    "object": "text_completion",
    "created": 542798,
    "model": "meta-llama/Llama-2-70b-chat-hf",
    "choices": [
        {
            "index": 0,
            "text": " What is your purpose?\nI am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational manner. My primary purpose is to assist users with their inquiries and provide information on a wide range of topics, from general knowledge to more specific and technical subjects. I can also help users with tasks such as writing, data analysis, and other forms of automation. My goal is to provide accurate and helpful responses to your questions and make your interactions with me as natural and intuitive as possible.",
            "logprobs": null
        }
    ]
}
{
    "model": "meta-llama/Llama-2-70b-chat-hf",
    "prompt": "Who are you?",
    "max_tokens": 1000,
    "temperature": 0
}
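For reference, a small sketch showing how a payload like the one above can be POSTed to vLLM's OpenAI-compatible completions route with the requests library; the endpoint URL is a placeholder, not a real deployment.
import requests

payload = {
    "model": "meta-llama/Llama-2-70b-chat-hf",
    "prompt": "Who are you?",
    "max_tokens": 1000,
    "temperature": 0,
}
# Placeholder URL; substitute your own RunPod / vLLM endpoint.
response = requests.post(
    "https://<YOUR-ENDPOINT>-8000.proxy.runpod.net/v1/completions",
    json=payload,
    timeout=120,
)
print(response.json()["choices"][0]["text"])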
import autogen

autogen.oai.ChatCompletion.start_logging()

config_list = [
    {
        'model': 'meta-llama/Llama-2-70b-chat-hf',
        'api_key': 'sk-111111111111111111111111111111111111111111111111',
        'api_type': 'openai',
        'api_base': 'https://<YOUR-ENDPOINT>-8000.proxy.runpod.net/v1',
    },
]
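To show where this config_list goes, a minimal sketch using AutoGen's agent classes; the agent names and the task message are illustrative, not from the original gist.
assistant = autogen.AssistantAgent(
    name="assistant",
    llm_config={"config_list": config_list},
)
user_proxy = autogen.UserProxyAgent(
    name="user_proxy",
    human_input_mode="NEVER",
    max_consecutive_auto_reply=0,  # stop after the assistant's first reply
)
user_proxy.initiate_chat(assistant, message="Summarise what vLLM does in two sentences.")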