@dbredvick
Created December 28, 2023 17:57
Stream markdown in RSC
import { Suspense } from "react";
import Markdown from "react-markdown";
import OpenAI from "openai";
import { OpenAIStream } from "ai";
// Assumed: kv is the @vercel/kv client used below to cache the finished markdown.
import { kv } from "@vercel/kv";
async function Reader({
  reader,
  accumulatedText = "",
}: {
  reader: ReadableStreamDefaultReader<Uint8Array>;
  accumulatedText?: string;
}) {
  const { done, value } = await reader.read();
  if (done) {
    // Stream finished: render whatever text is still buffered.
    // @ts-ignore
    return <Markdown components={{ pre: null, a: null }}>{accumulatedText}</Markdown>;
  }
  const text = new TextDecoder().decode(value);
  const newText = accumulatedText + text;
  const lastNewlineIndex = newText.lastIndexOf("\n");
  if (lastNewlineIndex === -1) {
    // No complete line yet; keep accumulating before rendering.
    return <Reader reader={reader} accumulatedText={newText} />;
  }
  // Render the complete lines now and keep streaming the remainder.
  const completeText = newText.slice(0, lastNewlineIndex);
  const remainingText = newText.slice(lastNewlineIndex + 1);
  return (
    <>
      <Markdown>{completeText}</Markdown>
      <Suspense>
        <Reader reader={reader} accumulatedText={remainingText} />
      </Suspense>
    </>
  );
}
export default async function Page() {
  const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
  const chatResponse = await openai.chat.completions.create({
    model: "gpt-4-vision-preview",
    max_tokens: 1000,
    stream: true,
    messages: [
      // TODO: add messages
    ],
  });
  const stream = OpenAIStream(chatResponse, {
    onFinal(completion) {
      // Cache the finished markdown; cacheKey must be defined elsewhere (e.g. per page).
      const markdown = completion;
      kv.set(cacheKey, markdown);
    },
  });
  const reader = stream.getReader();
  // We recursively render the stream as it comes in
  return (
    <Suspense>
      <Reader reader={reader} />
    </Suspense>
  );
}
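
Since onFinal stores the finished completion in kv, a later request could serve that cached markdown instead of streaming again. A minimal sketch of that read path, assuming cacheKey is a stable per-page string and kv is the @vercel/kv client (neither is pinned down in the snippet above):

// Sketch: render cached markdown if present; fall back to the streaming Reader otherwise.
async function CachedMarkdown({ cacheKey }: { cacheKey: string }) {
  const cached = await kv.get<string>(cacheKey); // assumes @vercel/kv's generic get()
  if (!cached) return null; // nothing cached yet; use the streaming path in Page()
  // @ts-ignore
  return <Markdown components={{ pre: null, a: null }}>{cached}</Markdown>;
}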