Created
April 29, 2023 16:38
-
-
Save adriancooney/2a6fe301a8452b367ebae78153cb3a47 to your computer and use it in GitHub Desktop.
Create a ReadableStream from an OpenAI GPT-3/4 LangChain chain for Edge streaming
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import { OpenAI } from "langchain/llms/openai"; | |
import { PromptTemplate } from "langchain/prompts"; | |
import { LLMChain } from "langchain/chains"; | |
import { streamCall } from "./stream-call" | |
// Next.js route segment config: run this handler on the Edge runtime
// (required for streaming Response bodies via ReadableStream).
export const config = {
  runtime: "edge",
};
export async function GET(request: Request) { | |
const model = new OpenAI({ | |
streaming: true, | |
}); | |
const template = "What is a good name for a company that makes {product}?"; | |
const prompt = new PromptTemplate({ template, inputVariables: ["product"] }); | |
const chain = new LLMChain({ llm: model, prompt }); | |
return new Response(streamCall(chain, { product: "colorful socks" })); | |
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
function streamCall(chain: BaseChain, inputs: ChainValues): ReadableStream { | |
const encoder = new TextEncoder(); | |
return new ReadableStream({ | |
async start(controller) { | |
await chain.call(inputs, [ | |
{ | |
handleLLMNewToken(token) { | |
controller.enqueue(encoder.encode(token)); | |
}, | |
handleChainError(err) { | |
controller.error(err); | |
}, | |
}, | |
]); | |
controller.close(); | |
}, | |
}); | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment