from typing import Optional, TypedDict

class AgentState(TypedDict):
    code: str
    tests: str
    errors: Optional[str]

def initial_writer(state: AgentState):
    # Produce the first draft of the code and its tests.
    ...
    return {"code": ..., "tests": ...}
from langchain.vectorstores import Pinecone
from langchain.embeddings.openai import OpenAIEmbeddings
import pinecone
# The environment should be the one specified next to the API key
# in your Pinecone console
pinecone.init(
    api_key="...", environment="..."
)
index = pinecone.Index("test123")
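# Hedged sketch: wrapping the raw index in LangChain's Pinecone vectorstore so
# it can be queried or used as a retriever. The "text" metadata key is an
# assumption about how the documents were upserted.
embeddings = OpenAIEmbeddings()
vectorstore = Pinecone(index, embeddings.embed_query, "text")
# retriever = vectorstore.as_retriever()
# vectorstore.similarity_search("example query", k=4)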
from langchain.chains.openai_functions import create_structured_output_runnable
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.pydantic_v1 import BaseModel, Field
class Insight(BaseModel):
    insight: str = Field(description="insight")
chat_model = ChatOpenAI(model_name="gpt-4-1106-preview")
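# Hedged sketch: pairing the Insight schema with the chat model via the
# structured-output runnable. The prompt wording and the {text} input variable
# are assumptions.
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "Extract a single insight from the user's text."),
        ("human", "{text}"),
    ]
)
runnable = create_structured_output_runnable(Insight, chat_model, prompt)
# runnable.invoke({"text": "..."})  # -> Insight(insight="...")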
import json

import requests
from bs4 import BeautifulSoup

from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.schema.output_parser import StrOutputParser
from langchain.schema.runnable import RunnableLambda, RunnablePassthrough
from langchain.utilities import DuckDuckGoSearchAPIWrapper

RESULTS_PER_QUESTION = 3
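# Hedged sketch: a small helper that uses the DuckDuckGo wrapper to fetch links
# for a query, which the rest of a research pipeline could then scrape with
# requests/BeautifulSoup. The helper name is my own.
ddg_search = DuckDuckGoSearchAPIWrapper()

def web_search(query: str, num_results: int = RESULTS_PER_QUESTION):
    # .results() returns dicts with "title", "snippet" and "link" keys.
    results = ddg_search.results(query, num_results)
    return [r["link"] for r in results]

# web_search("what is LangChain?")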
from langchain.prompts import PromptTemplate
from langchain.chat_models import ChatAnthropic
from langchain.schema.output_parser import StrOutputParser
#### ROUTER
# This is the router - responsible for choosing what to do
# ({question} input and "Classification:" cue added so the truncated prompt is runnable)
chain = (
    PromptTemplate.from_template(
        """Given the user question below, classify it as either being about `weather` or `other`.
Do not respond with more than one word.

Question: {question}

Classification:"""
    )
    | ChatAnthropic()
    | StrOutputParser()
)
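# Hedged sketch: using the one-word classification to route to a sub-chain.
# The two sub-chains below are placeholders standing in for real weather/general
# chains.
from langchain.schema.runnable import RunnableBranch, RunnableLambda

weather_chain = RunnableLambda(lambda x: "handled by the weather chain")
general_chain = RunnableLambda(lambda x: "handled by the general chain")

branch = RunnableBranch(
    (lambda x: "weather" in x["topic"].lower(), weather_chain),
    general_chain,
)
full_chain = {"topic": chain, "question": lambda x: x["question"]} | branch
# full_chain.invoke({"question": "Will it rain in Boston tomorrow?"})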
from langchain.agents import load_tools
from langchain.agents import initialize_agent
from langchain.agents import AgentType
from langchain.llms import OpenAI
llm = OpenAI(temperature=0, model="gpt-3.5-turbo-instruct")
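# Hedged sketch: loading a built-in tool and spinning up a classic zero-shot
# ReAct agent on top of the instruct model. The tool choice ("llm-math") is an
# assumption.
tools = load_tools(["llm-math"], llm=llm)
agent = initialize_agent(
    tools,
    llm,
    agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    verbose=True,
)
# agent.run("What is 2 raised to the 10th power?")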
from metaphor_python import Metaphor
client = Metaphor("")
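# Hedged usage sketch: a basic Metaphor search call (requires a real API key in
# the constructor above). The query and parameters follow the metaphor_python
# client and are illustrative only.
response = client.search(
    "interesting articles about large language models",
    num_results=5,
    use_autoprompt=True,
)
# for result in response.results:
#     print(result.title, result.url)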
from langchain import PromptTemplate, OpenAI, LLMChain
prompt_template = "What is a good name for a company that makes {product}?"
llm1 = OpenAI(model="foo", temperature=0)
llm_chain1 = LLMChain(
llm=llm1,
prompt=PromptTemplate.from_template(prompt_template)
)
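# Hedged usage sketch: the same chain built with a real model name, since "foo"
# above will be rejected by the OpenAI API. Output will vary with the model and
# temperature.
llm2 = OpenAI(model="gpt-3.5-turbo-instruct", temperature=0)
llm_chain2 = LLMChain(
    llm=llm2,
    prompt=PromptTemplate.from_template(prompt_template),
)
# llm_chain2.run(product="colorful socks")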
from langchain.prompts import ChatPromptTemplate
from langchain.chat_models import ChatOpenAI
from langchain.schema.output_parser import StrOutputParser
from langchain.vectorstores import Chroma
from langchain.embeddings import OpenAIEmbeddings
from langchain.schema.runnable import RunnablePassthrough
from langchain.schema.runnable import RunnableMap
from langchain.schema import format_document
from typing import AsyncGenerator
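# Hedged sketch: a minimal LCEL retrieval chain built from the imports above.
# The toy document, prompt wording, and question are illustrative only.
vectorstore = Chroma.from_texts(
    ["harrison worked at kensho"], embedding=OpenAIEmbeddings()
)
retriever = vectorstore.as_retriever()

rag_prompt = ChatPromptTemplate.from_template(
    """Answer the question based only on the following context:
{context}

Question: {question}"""
)
rag_model = ChatOpenAI()

rag_chain = (
    RunnableMap({"context": retriever, "question": RunnablePassthrough()})
    | rag_prompt
    | rag_model
    | StrOutputParser()
)
# rag_chain.invoke("where did harrison work?")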
from langchain.chat_models import ChatAnthropic
from kor import create_extraction_chain, Object, Text
llm = ChatAnthropic(
    model_name="claude-v2",
    temperature=0,
    max_tokens=2000,
)
# NOTE: the schema definition was cut off in the original; the fields below are
# placeholders for illustration.
schema = Object(
    id="person",
    description="Information about people mentioned in the text",
    attributes=[
        Text(id="name", description="The person's name"),
    ],
)
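# Hedged usage sketch: Kor builds an extraction chain from the LLM plus schema;
# running it returns structured matches for the placeholder schema above.
extraction_chain = create_extraction_chain(llm, schema)
# extraction_chain.run("Alice and Bob went hiking in the Alps.")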
import pandas as pd
from pandasai import PandasAI
# Sample DataFrame
df = pd.DataFrame({
    "country": ["United States", "United Kingdom", "France", "Germany", "Italy", "Spain", "Canada", "Australia", "Japan", "China"],
    "gdp": [19294482071552, 2891615567872, 2411255037952, 3435817336832, 1745433788416, 1181205135360, 1607402389504, 1490967855104, 4380756541440, 14631844184064],
    "happiness_index": [6.94, 7.16, 6.66, 7.07, 6.38, 6.4, 7.23, 7.22, 5.87, 5.12],
})
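# Hedged usage sketch: the classic PandasAI flow wraps an LLM and answers
# natural-language questions about df. The OpenAI wrapper import and API token
# are assumptions; use whichever backend you have configured.
from pandasai.llm.openai import OpenAI as PandasAIOpenAI

pandas_llm = PandasAIOpenAI(api_token="YOUR_API_TOKEN")
pandas_ai = PandasAI(pandas_llm)
# pandas_ai.run(df, prompt="Which are the 5 happiest countries?")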