Streaming OpenAI GPT Completions into Nuxt 3
// server/api/complete.post.ts
import { Configuration, OpenAIApi } from 'openai-edge'
import { OpenAIStream } from 'ai'
import type { H3Event } from 'h3'

const { OPENAI_API_KEY } = useRuntimeConfig()

let openai: OpenAIApi

function sendStream(event: H3Event, stream: ReadableStream) {
  // Mark to prevent h3 handling the response
  event._handled = true

  // Workers (unenv)
  // @ts-expect-error _data will be there.
  event.node.res._data = stream

  if (event.node.res.socket) {
    stream.pipeTo(
      new WritableStream({
        write(chunk) {
          event.node.res.write(chunk)
        },
        close() {
          event.node.res.end()
        }
      })
    )
  }
}

// Required for Vercel Edge
// export const config = {
//   runtime: 'edge'
// }

export default defineEventHandler(async (event) => {
  setResponseHeaders(event, {
    'Access-Control-Allow-Methods': 'GET,OPTIONS,PATCH,DELETE,POST,PUT',
    'Access-Control-Allow-Origin': '*',
    'Access-Control-Allow-Credentials': 'true',
    'Access-Control-Allow-Headers':
      'X-CSRF-Token, X-Requested-With, Accept, Accept-Version, Content-Length, Content-MD5, Content-Type, Date, X-Api-Version',
    'Access-Control-Expose-Headers': '*'
  })

  // Answer CORS preflight requests with an empty 204
  if (getMethod(event) === 'OPTIONS') {
    setResponseStatus(event, 204, 'No Content')
    return null
  }

  // readBody already parses JSON bodies; fall back to JSON.parse for
  // plain-text payloads (fetch defaults to text/plain for string bodies)
  const body = await readBody(event)
  const payload = typeof body === 'string' ? JSON.parse(body) : body
  if (!payload) {
    return { error: 'no payload' }
  }

  // Lazily create a single shared OpenAI client
  if (!openai) {
    const configuration = new Configuration({
      apiKey: OPENAI_API_KEY
    })
    openai = new OpenAIApi(configuration)
  }

  const response = await openai.createChatCompletion(payload)
  const stream = OpenAIStream(response)
  return sendStream(event, stream)
})
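Because OpenAIStream from the 'ai' package strips the SSE framing, the route streams back plain text chunks and can be consumed with a bare fetch, with no client library. A minimal standalone sketch, assuming the route above is mounted at /api/complete:

// Minimal standalone consumer (a sketch; assumes /api/complete from above)
const res = await fetch('/api/complete', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    model: 'gpt-3.5-turbo',
    stream: true,
    messages: [{ role: 'user', content: 'Tell me a joke' }]
  })
})

const reader = res.body.getReader()
const decoder = new TextDecoder()
let text = ''
while (true) {
  const { value, done } = await reader.read()
  if (done) break
  text += decoder.decode(value, { stream: true })
  console.log(text) // grows as tokens arrive
}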
<!-- pages/example.vue -->
<template>
  <div>{{ result }}</div>
</template>

<script setup>
const { complete } = useAI()
const result = ref('')

complete(
  {
    messages: [{ role: 'user', content: 'Tell me a joke' }]
  },
  {
    onText: (text) => {
      result.value = text
    },
    onComplete: (text) => {
      result.value = text
      console.log('Finished!')
    }
  }
)
</script>
// nuxt.config.ts
export default defineNuxtConfig({
  runtimeConfig: {
    // Server-only; read in the route handler via useRuntimeConfig()
    OPENAI_API_KEY: process.env.OPENAI_API_KEY
  }
})
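The key above is read from process.env when the app is built/started; Nuxt 3 can also override a runtimeConfig key at runtime via a matching NUXT_-prefixed environment variable. A sketch of the corresponding .env entries (placeholder values, never commit real keys):

# .env (placeholder values)
OPENAI_API_KEY=sk-...
# or override at runtime:
NUXT_OPENAI_API_KEY=sk-...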
// composables/useAI.js
const url = '/api/complete'

export const useAI = () => {
  const complete = async (
    { messages },
    { onText = () => {}, onComplete = () => {} } = {}
  ) => {
    const textArr = []
    const payload = JSON.stringify({
      model: 'gpt-3.5-turbo', // or 'gpt-4', 'gpt-4-0613', ...
      temperature: 1,
      max_tokens: 256,
      stream: true,
      messages
    })

    console.log('Run completion', payload)

    const { body } = await fetch(url, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: payload
    })

    // Read the streamed response chunk by chunk, emitting the
    // accumulated text on every chunk
    const reader = body.getReader()
    const decoder = new TextDecoder()
    let finished = false
    let completion
    while (!finished) {
      const { value, done } = await reader.read()
      finished = done
      if (value) {
        textArr.push(decoder.decode(value, { stream: true }))
      }
      completion = textArr.join('')
      onText(completion)
    }
    onComplete(completion)
  }

  return { complete }
}
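If a completion needs to be cancelled mid-stream (for example on route change or component unmount), the composable can be extended with an AbortController. A sketch under that assumption; the cancel helper and signal wiring are additions, not part of the original gist:

// composables/useAI.js — cancellable variant (a sketch, not the original)
export const useAI = () => {
  let controller

  const complete = async (
    { messages },
    { onText = () => {}, onComplete = () => {} } = {}
  ) => {
    controller = new AbortController()
    const { body } = await fetch('/api/complete', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      // Aborting rejects the pending fetch/read with an AbortError,
      // which callers should catch
      signal: controller.signal,
      body: JSON.stringify({ model: 'gpt-3.5-turbo', stream: true, messages })
    })

    const reader = body.getReader()
    const decoder = new TextDecoder()
    let text = ''
    while (true) {
      const { value, done } = await reader.read()
      if (done) break
      text += decoder.decode(value, { stream: true })
      onText(text)
    }
    onComplete(text)
  }

  // Abort the in-flight request, if any
  const cancel = () => controller?.abort()

  return { complete, cancel }
}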