Created
July 4, 2012 00:04
-
-
Save spion/3044268 to your computer and use it in GitHub Desktop.
node.js scaling problem
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/**
 * Module dependencies.
 */
var async = require('async');
var cluster = require('cluster');
var http = require('http');
var url = require('url');
// Let's say we have a backend server which
// does some data conversion. Unfortunately
// it can only process one item
// at a time, and it takes about 500ms
// for that server to process the result
// and answer with data.
//
// We need to queue our requests to this
// server and send them one at a time.
// Here is how the testing will work.
// Fires up the test clients: every 2 seconds, 3 clients request a
// "conversion" job at once. The first should be served immediately,
// the next two should be queued and served after the first one is
// done. If this doesn't happen and requests are processed in
// parallel, the real conversion server would break.
//
// Expected output:
// client 0 got response in ~500ms, client 1 in ~1000ms, client 2 in ~1500ms
var runTest = function() {
  console.log("Beginning test in 2 seconds");
  var attempts = 3;
  setInterval(function() {
    if (!attempts--) process.exit(); // end after 3 batches (about 8 seconds)
    var testBegin = new Date().getTime();
    console.log("Batch begins");
    for (var k = 0; k < 3; ++k) {
      // BUG FIX: capture the client index in its own scope. With a plain
      // function-scoped `var k`, every response callback would see the
      // final value (3) -- which is exactly why the sample output below
      // says "Client 3" for every response.
      (function(client) {
        console.log("Client", client, "requested conversion");
        http.get('http://localhost:8123/', function(res) {
          // Drain the response body so the HTTP agent can release the
          // socket; an unconsumed response would pin the connection.
          res.resume();
          console.log("Client", client, "got response in",
                      new Date().getTime() - testBegin, "ms");
        });
      })(k);
    }
  }, 2000);
}
// Simulated backend conversion server: each task takes 500ms, and
// async.queue with a concurrency of 1 guarantees that only one task
// is being "converted" at any given moment.
var queue = async.queue(function(task, done) {
  // Pretend the backend needs half a second, then answer the request.
  setTimeout(function() {
    task.res.end("Done processing");
    if (done) done();
  }, 500);
}, 1);
// Let's make a server cluster with 4 workers.
if (cluster.isMaster) {
  // Fork workers.
  for (var i = 0; i < 4; i++) { cluster.fork(); }
  cluster.on('exit', function(worker, code, signal) {
    // BUG FIX: the 'exit' handler receives a cluster.Worker object whose
    // OS pid lives at worker.process.pid; worker.pid is undefined and
    // previously logged "worker undefined died".
    console.log('worker ' + worker.process.pid + ' died');
  });
  // Run the test clients from the master process.
  runTest();
} else {
  // Workers can share any TCP connection.
  // In this case it's an HTTP server.
  http.createServer(function(req, res) {
    // In the real code most of the requests are not conversion requests;
    // they don't touch the backend server at all, so clustering is
    // beneficial for them.
    //
    // Add this request to the queue. NOTE: each forked worker has its
    // OWN `queue` instance, so concurrency is limited per worker rather
    // than globally -- this is the scaling problem the sample output
    // below demonstrates.
    queue.push({req: req, res: res});
  }).listen(8123);
}
// Let's see what happens:
// $ node app.js
// Beginning test in 2 seconds
// Batch begins
// Client 0 requested conversion
// Client 1 requested conversion
// Client 2 requested conversion
// Client 3 got response in 514 ms
// Client 3 got response in 515 ms
// Client 3 got response in 518 ms
// Batch begins
// Client 0 requested conversion
// Client 1 requested conversion
// Client 2 requested conversion
// Client 3 got response in 503 ms
// Client 3 got response in 504 ms
// Client 3 got response in 505 ms
// Batch begins
// Client 0 requested conversion
// Client 1 requested conversion
// Client 2 requested conversion
// Client 3 got response in 503 ms
// Client 3 got response in 508 ms
// Client 3 got response in 508 ms
// Requests are not queued, since there is now one separate queue per worker :(
//
// (The "Client 3" labels are a second, unrelated bug: the response
// callbacks close over the function-scoped `var k`, which has already
// reached its final value of 3 by the time any response arrives.)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment