from langchain.prompts import ChatPromptTemplate
from langchain.chat_models import ChatOpenAI
from langchain.schema.output_parser import StrOutputParser
from langchain.vectorstores import Chroma
from langchain.embeddings import OpenAIEmbeddings
from langchain.schema.runnable import RunnablePassthrough
from langchain.schema.runnable import RunnableMap
from langchain.schema import format_document
from typing import AsyncGenerator
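# Hedged sketch of the LCEL retrieval chain these imports are typically used for:
# an in-memory Chroma store built from a toy text supplies {context}, while the raw
# question passes straight through; the toy text and prompt wording are assumptions.
vectorstore = Chroma.from_texts(
    ["harrison worked at kensho"], embedding=OpenAIEmbeddings()
)
retriever = vectorstore.as_retriever()
rag_prompt = ChatPromptTemplate.from_template(
    """Answer the question based only on the following context:
{context}

Question: {question}"""
)
rag_chain = (
    RunnableMap({"context": retriever, "question": RunnablePassthrough()})
    | rag_prompt
    | ChatOpenAI()
    | StrOutputParser()
)
rag_chain.invoke("where did harrison work?")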
from langchain import PromptTemplate, OpenAI, LLMChain
prompt_template = "What is a good name for a company that makes {product}?"
llm1 = OpenAI(model="foo", temperature=0)
llm_chain1 = LLMChain(
    llm=llm1,
    prompt=PromptTemplate.from_template(prompt_template),
)
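# Usage sketch: model="foo" above is not a real OpenAI model, so this call would
# error unless it is swapped for a real completion model (or wrapped with a
# fallback); the product value below is an illustrative assumption.
llm_chain1.run(product="colorful socks")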
from langchain.agents import load_tools
from langchain.agents import initialize_agent
from langchain.agents import AgentType
from langchain.llms import OpenAI
llm = OpenAI(temperature=0, model="gpt-3.5-turbo-instruct")
from metaphor_python import Metaphor
client = Metaphor("")
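# Sketch: expose Metaphor search as a LangChain tool and hand it to a ReAct-style
# agent; assumes a real Metaphor API key is supplied above, and the query is
# purely illustrative.
from langchain.agents import tool

@tool
def search(query: str):
    """Search the web via Metaphor and return the raw results."""
    return client.search(query, num_results=3)

agent = initialize_agent(
    [search], llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True
)
agent.run("Find a recent article about vector databases.")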
from langchain.prompts import PromptTemplate
from langchain.chat_models import ChatAnthropic
from langchain.schema.output_parser import StrOutputParser
#### ROUTER
# This is the router - responsible for choosing what to do
chain = PromptTemplate.from_template(
    """Given the user question below, classify it as either being about `weather` or `other`.
Do not respond with more than one word.
Question: {question}
Classification:"""
) | ChatAnthropic() | StrOutputParser()
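# Sketch: invoking the classifier returns a single word ("weather" or "other")
# that downstream routing logic can branch on.
chain.invoke({"question": "How hot is it outside today?"})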
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.schema.output_parser import StrOutputParser
import requests
from bs4 import BeautifulSoup
from langchain.schema.runnable import RunnablePassthrough, RunnableLambda
from langchain.utilities import DuckDuckGoSearchAPIWrapper
import json
RESULTS_PER_QUESTION = 3
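# Sketch of how these imports are usually wired together in the research-assistant
# pattern: DuckDuckGo supplies candidate links and requests + BeautifulSoup pull
# the page text; the helper names and the timeout are assumptions.
ddg_search = DuckDuckGoSearchAPIWrapper()

def web_search(query: str, num_results: int = RESULTS_PER_QUESTION):
    results = ddg_search.results(query, num_results)
    return [r["link"] for r in results]

def scrape_text(url: str) -> str:
    try:
        page = requests.get(url, timeout=10)
        soup = BeautifulSoup(page.text, "html.parser")
        return soup.get_text(separator=" ", strip=True)
    except Exception as e:
        return f"Failed to retrieve {url}: {e}"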
from langchain.chains.openai_functions import create_structured_output_runnable
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.pydantic_v1 import BaseModel, Field
class Insight(BaseModel):
    insight: str = Field(description="""insight""")
chat_model = ChatOpenAI(model_name="gpt-4-1106-preview")
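# Sketch: create_structured_output_runnable binds the Insight schema to the chat
# model via OpenAI function calling, so the response comes back as a validated
# pydantic object; the prompt wording and input text are assumptions.
insight_prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "Extract the single most important insight from the user's text."),
        ("human", "{input}"),
    ]
)
extract_insight = create_structured_output_runnable(Insight, chat_model, insight_prompt)
extract_insight.invoke({"input": "Churn dropped 12% after we simplified onboarding."})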
from langchain.vectorstores import Pinecone
from langchain.embeddings.openai import OpenAIEmbeddings
import pinecone
# The environment should be the one specified next to the API key
# in your Pinecone console
pinecone.init(
    api_key="...", environment="..."
)
index = pinecone.Index("test123")
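# Sketch: wrap the existing index in a LangChain vectorstore; assumes the "test123"
# index was created with a dimension matching OpenAI embeddings (1536) and that
# documents keep their text under the "text" metadata key.
embeddings = OpenAIEmbeddings()
pinecone_store = Pinecone(index, embeddings.embed_query, "text")
pinecone_store.similarity_search("what did the author say about testing?", k=3)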
from typing import TypedDict, Optional

class AgentState(TypedDict):
    code: str
    tests: str
    errors: Optional[str]

def initial_writer(state):
    ...
    return {"code": ..., "tests": ...}
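# Sketch (assumes langgraph is installed, which the snippet above implies but does
# not import): wire the writer node into a minimal StateGraph; a fuller version
# would add test-execution and retry nodes that update `errors` and loop back.
from langgraph.graph import StateGraph, END

workflow = StateGraph(AgentState)
workflow.add_node("writer", initial_writer)
workflow.set_entry_point("writer")
workflow.add_edge("writer", END)
app = workflow.compile()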