Skip to content

Instantly share code, notes, and snippets.

@alevz257
Created August 29, 2023 07:38
Show Gist options
  • Save alevz257/fda6d0eefef56d4853459c66d9189167 to your computer and use it in GitHub Desktop.
from langchain.llms import VertexAI
from langchain import PromptTemplate, LLMChain
from langchain.memory import ConversationBufferMemory
from langchain.agents import Tool
from langchain.agents import tool
from langchain.agents import AgentType
from langchain.utilities import SerpAPIWrapper
from langchain.utilities import OpenWeatherMapAPIWrapper
from langchain.agents import initialize_agent
import os
import streamlit as st
# Registered as a LangChain tool; the docstring below is presumably surfaced
# to the agent as the tool's description — verify against langchain's @tool docs.
@tool
def get_word_length(word: str) -> int:
    """Returns the length of a word."""
    return len(word)
@st.cache_resource(show_spinner=False)
def LLM_init():
    """Build and cache the conversational travel-assistant agent.

    Wires three tools — web search, current weather, and word length — into
    a CONVERSATIONAL_REACT_DESCRIPTION agent backed by VertexAI, with a
    conversation buffer so follow-up questions keep chat context.

    Returns:
        The initialized LangChain agent executor.
    """
    # NOTE(review): keys are hard-coded placeholders — consider st.secrets.
    os.environ["OPENWEATHERMAP_API_KEY"] = "<API KEY HERE>"
    os.environ["SERPAPI_API_KEY"] = "<API KEY HERE>"

    # Both wrappers read their API keys from the environment set above.
    weather = OpenWeatherMapAPIWrapper()
    search = SerpAPIWrapper()

    agent_tools = [
        Tool(
            name="current search",
            func=search.run,
            description="useful for when you need to answer questions about current events or the current state of the world",
        ),
        Tool(
            name="weather",
            func=weather.run,
            description="Return current weather data based on location",
        ),
        Tool(
            name="getlength",
            func=get_word_length,
            description="Return the length of a word",
        ),
    ]

    # memory_key must match the variable the conversational agent's prompt expects.
    chat_memory = ConversationBufferMemory(memory_key="chat_history")

    return initialize_agent(
        agent_tools,
        agent=AgentType.CONVERSATIONAL_REACT_DESCRIPTION,
        llm=VertexAI(),
        memory=chat_memory,
        verbose=True,
    )
# --- Page chrome -----------------------------------------------------------
st.set_page_config(page_title="🦜🔗 Demo App")
st.title("🦜🔗 Demo App")
st.title("💬 Travel Assistant")

# Seed the transcript with the assistant's greeting on the very first run.
if "messages" not in st.session_state:
    st.session_state["messages"] = [
        {
            "role": "assistant",
            "content": "Hi my name is Melali and I am your travel consultant, how can I help you?",
        }
    ]

# Replay the stored transcript so the conversation survives Streamlit reruns.
for entry in st.session_state.messages:
    st.chat_message(entry["role"]).write(entry["content"])

# Handle a new user turn: record and echo it, run the agent, show the reply.
if user_text := st.chat_input():
    st.session_state.messages.append({"role": "user", "content": user_text})
    st.chat_message("user").write(user_text)

    agent = LLM_init()
    # Calling the agent returns a dict; the final answer lives under "output".
    reply = agent(user_text)

    st.session_state.messages.append({"role": "assistant", "content": reply["output"]})
    st.chat_message("assistant").write(reply["output"])
@faizagulzarahmed2303-001-KHI-DEG

hi

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment