@ehartford
Last active April 15, 2024 04:10
// npm i express axios body-parser && node ./oailogger.js
const express = require('express');
const axios = require('axios');
const bodyParser = require('body-parser');
const stream = require('stream');
const { promisify } = require('util');
const fs = require('fs');
const logStream = fs.createWriteStream('logs.jsonl', { flags: 'a' });
const app = express();
const OPENAI_API_URL = 'https://api.openai.com';
const pipeline = promisify(stream.pipeline);
app.use(bodyParser.raw({ type: '*/*' }));
app.all('*', async (req, res) => {
  const path = req.originalUrl;
  const url = `${OPENAI_API_URL}${path}`;
  // Forward the client's headers, but rewrite Host to match the upstream API
  const headers = { ...req.headers, 'host': new URL(OPENAI_API_URL).host };
  // Only POST requests carry a JSON body; guard against empty or non-JSON bodies
  let isStream = false;
  if (req.method === 'POST' && Buffer.isBuffer(req.body) && req.body.length) {
    try {
      isStream = JSON.parse(req.body.toString()).stream === true;
    } catch (e) {
      // Body is not JSON; treat it as a regular (non-streaming) request
    }
  }
  try {
    const response = await axios({
      method: req.method,
      url: url,
      headers: headers,
      data: req.method === 'POST' ? req.body : undefined,
      responseType: isStream ? 'stream' : 'json'
    });
    if (req.method === 'POST') {
      if (isStream) {
        // Tee the SSE stream: buffer chunks for logging while piping to the client
        const responseChunks = [];
        response.data.on('data', (chunk) => {
          responseChunks.push(chunk);
        });
        await pipeline(response.data, res);
        // Reassemble the streamed completion from the "data: " SSE lines
        const completeResponse = Buffer.concat(responseChunks).toString('utf-8');
        const parsedResponse = completeResponse.split('\n')
          .map(line => {
            if (line.startsWith('data: ')) {
              try {
                return JSON.parse(line.substring('data: '.length));
              } catch (error) {
                return null; // Skip non-JSON lines such as "data: [DONE]"
              }
            }
            return null;
          })
          .filter(chunk => chunk !== null)
          .map(chunk => (chunk.choices || [])
            .map(choice => choice.delta?.content || '')
            .join(''))
          .join('');
        logStream.write(JSON.stringify({
          request: JSON.parse(req.body.toString()),
          response: parsedResponse
        }));
        logStream.write('\n');
      } else {
        // Log the complete (non-streamed) response before returning it
        const completeResponse = response.data.choices[0].message.content;
        logStream.write(JSON.stringify({
          request: JSON.parse(req.body.toString()),
          response: completeResponse
        }));
        logStream.write('\n');
        res.json(response.data);
      }
    } else {
      // Non-POST requests (e.g. GET /v1/models) come back as plain JSON
      res.json(response.data);
    }
  } catch (error) {
    console.error(`Error proxying request: ${error}`);
    if (!res.headersSent) {
      // Forward the upstream status code when the API itself returned an error
      res.status(error.response ? error.response.status : 500).send('Internal Server Error');
    }
  }
});
const PORT = process.env.PORT || 5001;
app.listen(PORT, () => console.log(`Proxy server running on port ${PORT}`));
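
For a quick smoke test without a UI, a short script like this (a sketch; it assumes the proxy is running on its default port 5001 and that OPENAI_API_KEY is set in your environment) should print a completion and append one line to logs.jsonl:

// test-request.js — npm i axios && node ./test-request.js
const axios = require('axios');

axios.post('http://localhost:5001/v1/chat/completions', {
  model: 'gpt-3.5-turbo',
  messages: [{ role: 'user', content: 'Say hello' }]
}, {
  headers: { 'Authorization': `Bearer ${process.env.OPENAI_API_KEY}` }
}).then(r => {
  // The proxy forwards OpenAI's response unchanged and logs the exchange
  console.log(r.data.choices[0].message.content);
}).catch(e => console.error(e.message));

Each logged line has the shape {"request": <the JSON body you sent>, "response": "<the assistant text>"}, for both streamed and non-streamed requests.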
@ehartford (Author)

You can test it like this:

git clone https://github.com/mckaywrigley/chatbot-ui
cd chatbot-ui
echo "DEFAULT_MODEL=gpt-3.5-turbo" >> .env.local
echo "NEXT_PUBLIC_DEFAULT_SYSTEM_PROMPT=You are a helpful AI assistant" >> .env.local
echo "OPENAI_API_KEY=your_key" >> .env.local
echo "OPENAI_API_HOST=http://localhost:5000" >> .env.local
npm i
npm run dev
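
Once some conversations have gone through, each line of logs.jsonl is one request/response pair, so you can inspect or convert the log with a few lines of Node. A minimal sketch (the request/response field names match the logger above; the printed format is just an illustration):

// read-logs.js — node ./read-logs.js
const fs = require('fs');
const readline = require('readline');

const rl = readline.createInterface({
  input: fs.createReadStream('logs.jsonl'),
  crlfDelay: Infinity
});

rl.on('line', (line) => {
  const { request, response } = JSON.parse(line);
  // request is the original chat payload; response is the assistant's text
  const lastUserMessage = request.messages[request.messages.length - 1].content;
  console.log(`[${request.model}] ${lastUserMessage} -> ${response}`);
});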
