Skip to content

Instantly share code, notes, and snippets.

@olessavluk
Created May 12, 2025 16:05
Show Gist options
  • Save olessavluk/8bdfb495c2b48c3baaab3dd704f85403 to your computer and use it in GitHub Desktop.
import http from "node:http";
import { request, Agent, interceptors, setGlobalDispatcher } from "undici";
// Compose response caching, DNS caching, and retrying onto a single Agent.
const { cache, dns, retry } = interceptors;

const agentOptions = {
  connections: 100, // cap concurrent kept-alive sockets so we don't exhaust resources
  headersTimeout: 10_000, // 10 seconds — tune for the upstreams you plan to talk to
  bodyTimeout: 10_000,
};

const defaultDispatcher = new Agent(agentOptions).compose(cache(), dns(), retry());

// Route every `fetch` and undici `request` call through this dispatcher.
setGlobalDispatcher(defaultDispatcher);
/**
 * Fire `n` HTTP requests sequentially at the local test server, fully
 * consuming each response body.
 *
 * Fixes vs. original: log-message typo ("Runnig … request"), off-by-one
 * loop bound (`i <= n` issued n+1 requests), and malformed header names
 * ("Content-Tupe", "UserAgent").
 *
 * @param {number} n - number of requests to issue.
 */
async function run(n) {
  console.log(`Running ${n} requests sequentially`);
  for (let i = 0; i < n; i++) {
    const res = await request({
      origin: "http://localhost:8080",
      // Cycle through 10 distinct URLs so the cache interceptor keeps a
      // bounded set of entries to store and revalidate.
      path: `?i=${i % 10}`,
      headers: {
        "Content-Type": "application/json",
        "User-Agent": "UndiciExample/1.0.0",
      },
    });
    // Always drain the body so the keep-alive connection can be reused and
    // the response buffers can be released.
    await res.body.text();
  }
}
const N = 1000;
// N x N matrix of random floats — a deliberately large JSON payload used to
// make per-response memory usage easy to observe.
const bigChunk = Array.from({ length: N }, () =>
  Array.from({ length: N }, () => Math.random())
);
const bigJSON = JSON.stringify(bigChunk, null, 2);
// Serve the big JSON payload on every request. The short max-age plus
// stale-while-revalidate exercises the cache interceptor's revalidation
// path, which is where the memory growth shows up.
const server = new http.Server((req, res) => {
  res
    .writeHead(200, {
      "Content-Type": "application/json",
      // memory will start leaking after this time
      // "Cache-Control": `public, max-age=${String(1)}`,
      // Fixed: the value was wrapped in literal double quotes, producing a
      // malformed Cache-Control header.
      "Cache-Control": "public, max-age=1, stale-while-revalidate=1",
    })
    .end(bigJSON);
});
server.listen(8080);
// Resolve after `t` milliseconds.
const sleep = (t) =>
  new Promise((resolve) => {
    setTimeout(resolve, t);
  });
// Print the current process memory usage, converted from bytes to MiB.
// Triggers a GC first when the process was started with --expose-gc, so the
// numbers reflect live memory rather than uncollected garbage.
const logMemory = () => {
  global.gc?.();
  const usage = process.memoryUsage();
  const inMebibytes = {};
  for (const [key, bytes] of Object.entries(usage)) {
    inMebibytes[key] = bytes / 1024 / 1024;
  }
  console.log(inMebibytes);
};
// Expose the helpers on `global` so they can be re-invoked from an
// interactive inspector/REPL session while the process is alive.
global.logMemory = logMemory;
global.run = run;
// Baseline snapshot, then two batches of 100 requests with a snapshot
// after each to show the growth between runs.
logMemory();
await run(100);
logMemory();
await run(100);
logMemory();
console.log("Sleeping for 2s to trigger GC");
await sleep(2000);
logMemory();
console.log("^ see how much `external` & `arrayBuffers` increased with each run?");
// Server is left running intentionally so the process stays up for inspection.
// server.close();
{
rss: 107.09375,
heapTotal: 72.96875,
heapUsed: 37.90514373779297,
external: 3.2410106658935547,
arrayBuffers: 0.1122894287109375
}
Running 100 requests sequentially
{
rss: 2600.625,
heapTotal: 2205.65625,
heapUsed: 2170.1848220825195,
external: 281.41249084472656,
arrayBuffers: 231.66439628601074
}
Running 100 requests sequentially
{
rss: 2736.78125,
heapTotal: 2044.453125,
heapUsed: 2008.7924270629883,
external: 512.9489650726318,
arrayBuffers: 486.3542881011963
}
Sleeping for 2s to trigger GC
{
rss: 2737.296875,
heapTotal: 75.09375,
heapUsed: 40.322914123535156,
external: 466.6343173980713,
arrayBuffers: 463.193058013916
}
^ see how much `external` & `arrayBuffers` increased with each run?
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment