Skip to content

Instantly share code, notes, and snippets.

@CocoaPriest
Last active January 17, 2024 12:22
Show Gist options
  • Save CocoaPriest/29d91d92e9ab4730eb0f601352653cc2 to your computer and use it in GitHub Desktop.
AWS Lambda proxy that forwards streaming OpenAI chat-completion calls back and forth between the client and the OpenAI API.
import OpenAI from "openai";
import stream from "stream";
import { Transform } from "stream";
import util from "util";
// Promisified stream.pipeline so the pipe in process_stream can be awaited.
const pipeline = util.promisify(stream.pipeline);
// Save your `OPENAI_API_KEY` in Lambda env.
// NOTE(review): the client is built at module load, so a missing/invalid key
// only surfaces on the first API call — confirm the env var is configured.
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
});
// Lambda response-streaming entry point: parses the caller's JSON body, opens
// a streaming completion against OpenAI, and forwards the tokens back to the
// caller through `responseStream`.
//
// Fix: `JSON.parse(event.body)` now runs INSIDE the try block — previously a
// malformed body threw before the catch could route the error to
// `write_error_to_stream`, leaving the response stream unhandled.
export const handler = awslambda.streamifyResponse(async (event, responseStream, _context) => {
    try {
        // Some input. Also, good time to check if user is eligible to call
        // this endpoint (check StoreKit's transaction_id etc.)
        const inputJson = JSON.parse(event.body);

        // First, just get the stream to OpenAI.
        // (Renamed from `stream` — that name shadowed the module-level
        // `stream` import.)
        const { data: completionStream, response: raw } = await completion_stream(inputJson);
        // `raw` is useful to read OpenAI headers, for ex.:
        // `raw.headers.get("x-ratelimit-remaining-requests")`

        // Make any transformations, forward to `responseStream`.
        await process_stream(completionStream, responseStream);
    } catch (err) {
        write_error_to_stream(err, responseStream); // Have your own logic here or just end() the stream
    }
});
/**
 * Opens a streaming chat completion against OpenAI.
 *
 * @param {object} inputJson - Parsed request body; use it to build the user prompt.
 * @returns {Promise<{data: AsyncIterable<object>, response: Response}>}
 *   The chunk stream plus the raw HTTP response (useful for rate-limit headers).
 */
async function completion_stream(inputJson) {
    const systemMessage = {
        role: "system",
        content: "/* System Prompt */",
    };
    const userMessage = {
        role: "user",
        content: [
            {
                type: "text",
                text: "/* User Prompt based on `inputJson` */",
            },
        ],
    };
    // `stream: true` makes the SDK return an async-iterable of deltas;
    // `withResponse()` additionally exposes the raw HTTP response.
    const request = openai.chat.completions.create({
        messages: [systemMessage, userMessage],
        model: "/* Model */",
        stream: true,
    });
    return await request.withResponse();
}
/**
 * Pipes an OpenAI completion stream into the Lambda response stream,
 * reducing each chunk object to its delta text.
 *
 * Fixes over the original:
 *  - the `stream` parameter shadowed the module-level `stream` import; it is
 *    renamed to `openaiStream` (positional call sites are unaffected);
 *  - `chunk.choices[0]` threw on chunks that carry no `choices` array (e.g.
 *    the trailing usage chunk sent with `stream_options.include_usage`); the
 *    access is now fully optional-chained;
 *  - uses `stream.promises.pipeline` directly, equivalent to the promisified
 *    module-level helper.
 *
 * @param {AsyncIterable<object>} openaiStream - Readable of OpenAI chunk objects.
 * @param {import("stream").Writable} responseStream - Destination for the text.
 */
async function process_stream(openaiStream, responseStream) {
    const transformStream = new Transform({
        readableObjectMode: false, // emit plain text downstream
        writableObjectMode: true, // accept OpenAI chunk objects upstream
        transform(chunk, encoding, callback) {
            try {
                // Some chunks legitimately carry no choices — treat them as
                // empty text instead of throwing.
                const content = chunk.choices?.[0]?.delta?.content ?? "";
                // Here's a good place to pre-process content chunks, before forwarding them
                callback(null, content);
            } catch (err) {
                callback(err);
            }
        },
    });
    try {
        await stream.promises.pipeline(
            openaiStream, // Readable stream from the OpenAI API
            transformStream, // Transform stream to handle and modify the data
            responseStream // Writable stream where the data will be written
        );
        console.log("Pipeline finished");
    } catch (err) {
        // pipeline() destroys all three streams on failure; log and swallow so
        // the handler's response lifecycle stays with the caller.
        console.error("Pipeline failed", err);
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment