Skip to content

Instantly share code, notes, and snippets.

@raydog
Created June 16, 2014 22:38
Show Gist options
  • Save raydog/64146e0bfa2029169673 to your computer and use it in GitHub Desktop.
Save raydog/64146e0bfa2029169673 to your computer and use it in GitHub Desktop.
UDP experiment with Node.js
var dgram = require('dgram');
// Tiny stopwatch: remembers the time of construction (or of the last poll).
function Timer() {
  this.st = Date.now();
}

// Log the milliseconds elapsed since the last poll (or construction) under
// the given label (defaults to "--"), then restart the clock.
Timer.prototype.poll = function poll(name) {
  var label = name || "--";
  var current = Date.now();
  var elapsed = current - this.st;
  console.log("%s :: %d", label, elapsed);
  this.st = current;
};
// Strict loop, opening + closing UDP sockets:
// Sends 100000 "STRICT_LOOP" datagrams one-at-a-time, creating and closing a
// fresh socket per send, then reports elapsed time and chains to the cached test.
function testStrictLoop(num) {
  num = num || 1;
  var socket = dgram.createSocket("udp4");
  var msg = new Buffer("STRICT_LOOP");
  socket.send(msg, 0, msg.length, 8123, "localhost", function (err, bytes) {
    socket.close();
    // BUG FIX: err was silently ignored here, unlike the cached-socket tests
    // which abort on error; handle it consistently.
    if (err) {
      console.error(err.stack);
      process.exit(1);
    }
    if (num < 100000) {
      process.nextTick(testStrictLoop.bind(null, num + 1));
    } else {
      timer.poll("strict loop");
      setImmediate(testCachedStrictLoop);
    }
  });
}
// Loop that depends on back-pressure while opening + closing UDP sockets:
// NOTE: This strategy errors with <<EMFILE>>:
// var backpressure_count = 0;
// function _backPressureSend() {
// var socket = dgram.createSocket("udp4");
// var msg = new Buffer("BACKPRESSURE");
// backpressure_count ++;
// socket.send(msg, 0, msg.length, 8123, "localhost", function (err, bytes) {
// if (err) {
// timer.poll
// }
// socket.close();
// backpressure_count --;
// if (!backpressure_count) {
// timer.poll("backpressure");
// setImmediate(testCachedStrictLoop);
// }
// });
// }
// // Async utilizing back-pressure:
// function testBackPressure() {
// // Strict loop:
// for (var i=0; i<100000; i++) {
// _backPressureSend()
// }
// }
// Strict with cached socket: one socket reused for every send.
var cached_socket = dgram.createSocket("udp4");

// Sends 100000 "CACHED_LOOP" datagrams one-at-a-time over the shared socket,
// then reports elapsed time and chains to the cached backpressure test.
function testCachedStrictLoop(num) {
  num = num || 1;
  // Strict loop:
  var msg = new Buffer("CACHED_LOOP");
  cached_socket.send(msg, 0, msg.length, 8123, "localhost", function (err, bytes) {
    if (err) {
      // BUG FIX: console.err does not exist (it would throw a TypeError on any
      // send error); console.error is the correct API.
      console.error(err.stack);
      process.exit(1);
    }
    if (num < 100000) {
      process.nextTick(testCachedStrictLoop.bind(null, num + 1));
    } else {
      timer.poll("cached loop");
      setImmediate(testCachedBackPressure);
    }
  });
}
// Async with cached socket: all sends are dispatched up front on one shared socket.
var async_cached_socket = dgram.createSocket("udp4");
// Number of sends still in flight; when it drains back to 0 the test is done.
var async_backpressure_count = 0;

// Queues one "CACHED_BACKPRESSURE" datagram. When the last outstanding send
// completes, logs elapsed time, then (after a 5s grace period for the server
// to drain) sends the "__FULL_PRINT__" sentinel and exits.
function _cachedBackPressureSend() {
  var msg = new Buffer("CACHED_BACKPRESSURE");
  async_backpressure_count ++;
  async_cached_socket.send(msg, 0, msg.length, 8123, "localhost", function (err, bytes) {
    if (err) {
      // BUG FIX: console.err does not exist (it would throw a TypeError on any
      // send error); console.error is the correct API.
      console.error(err.stack);
      process.exit(1);
    }
    async_backpressure_count --;
    if (!async_backpressure_count) {
      timer.poll("backpressure cached");
      console.log("Done");
      setTimeout(function () {
        msg = new Buffer("__FULL_PRINT__");
        async_cached_socket.send(msg, 0, msg.length, 8123, "localhost", function () {
          process.exit(0);
        });
      }, 5000);
    }
  });
}
// Async utilizing back-pressure:
// Fires off all 100000 sends synchronously and lets Node's internal queueing
// absorb them; completion is tracked by async_backpressure_count draining to 0.
function testCachedBackPressure() {
  // Strict loop:
  var total = 100000;
  for (var idx = 0; idx < total; idx++) {
    _cachedBackPressureSend();
  }
  console.log("done sending. backpressure at", async_backpressure_count);
}
// Let's kick things into motion! Each test schedules the next when it
// finishes: strict loop -> cached loop -> cached backpressure.
var timer = new Timer();
testStrictLoop();
// --- Receiver ---
// NOTE(review): dgram and socket are re-declared below; this section appears
// to be a separate server script from the same gist — confirm the file split
// before running everything as one file.
var dgram = require('dgram');
// counts: per-message-type totals with start/end timestamps, kept for the whole run.
var counts = {};
var socket = dgram.createSocket("udp4");
socket.bind(8123, function () {
// socket.addMembership('0.0.0.0');
});
// recv: per-second tallies, reset by the interval logger at the bottom.
var recv = {};
socket.on('message', function (msg, rinfo) {
var lol = msg.toString();
// Sentinel sent by the client after its last test: dump the final totals.
if (lol ==="__FULL_PRINT__") {
console.log(counts);
return;
}
recv[lol] = (recv[lol] || 0) + 1;
// Lazily create a stats record: n = count, s = first-seen time, e = end time.
var data = counts[lol] = counts[lol] || { n: 0, s: Date.now(), e: null };
data.n ++;
// Once 100000 messages of this type arrive, log total elapsed time.
// (Note: this logs again for every message past the threshold.)
if (data.n >= 100000) {
data.e = Date.now();
console.log("=============");
console.log(lol, data.e - data.s);
}
});
// Log and reset the per-second tallies once a second.
setInterval(function () {
console.log("RECEIVED", recv);
recv = {};
}, 1000);

# UDP Sending Strategy Experiment

Tested 4 different strategies for sending UDP packets in Node.js: create/destroy socket in series, create/destroy socket in parallel, cached socket in series, and cached socket in parallel.

Test Results

Create/Destroy in Series

This strategy would create a UDP4 socket every time it wanted to send something, and destroy it when it was done. It would also dispatch events one-at-a-time, so the network performance would affect how long it took to send all messages.

Results:

  • Server: Received ~4900 messages per second.
  • Client: Took 20.4 seconds to send 100000 messages.

Create/Destroy in Parallel

This strategy would create a UDP4 socket every time it wanted to send something, and destroy it when it was done. It would try to dispatch all events synchronously, and depend on Node.js backpressure to mitigate the network overhead.

Results:

  • Server: ** Crashed: Out of file handles **
  • Client: ** Crashed: Out of file handles **

Cached Socket in Series

This strategy would create one UDP4 socket, and use it for all connections. It would also dispatch events one-at-a-time, so the network performance would affect how long it took to send all messages.

Results:

  • Server: Received ~6300 messages per second.
  • Client: Took 15.8 seconds to send 100000 messages.

Cached Socket in Parallel

This strategy would create one UDP4 socket, and use it for all connections. It would try to dispatch all events synchronously, and depend on Node.js backpressure to mitigate the network overhead.

Results:

  • Server: Received ~13300 messages per second.
  • Client: Took 7.76 seconds to send 100000 messages.
  • Other: 3.9% Packet loss (Server UDP buffer too full, probably)

Conclusion

Caching a socket always performs better. Dispatching messages asynchronously gives incredible performance, but if the server gets swamped, you can get packet loss. It makes sense that series dispatches don't lose packets, since only one message is in flight at a time. (Pretty much.)

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment