public
Created

An example of how to work around the node.js couchbase library when saving a large number of documents. Use a pool of connections and make sure each connection doesn't get more than around 50 docs at a time.

  • Download Gist
couchbaseBulkSet.js
JavaScript
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70
/**
* An example of how to work around the node.js couchbase library when saving a large number of documents.
* Use a pool of connections and make sure each connection doesn't get more than around 50 docs at a time.
*
* @author Mladen Markov (mladen.markov@gmail.com)
* @version 1.0 Feb 12, 2013 19:45
*/
 
var async = require("async");
var couchBase = require("couchbase");
 
// Connection settings for the example Couchbase cluster.
// NOTE(review): credentials are hard-coded for demo purposes only — do not
// copy this pattern into production code.
var config = {
"host": "localhost",
"port": 8091,
"username": "Administrator",
"password": "password",
"bucket": "facebook-pages"
};

// Pool of open bucket connections; populated by the async.times() callback below.
var buckets;
// Round-robin cursor into `buckets`; advanced by getBucket().
var current = 0;
 
// Size of the connection pool and number of sample documents to store.
var POOL_SIZE = 100;
var DOC_COUNT = 4000;
// Upper bound on simultaneously in-flight set() calls per pooled connection.
var DOCS_PER_CONNECTION = 50;

// Open POOL_SIZE connections in parallel; once the pool is ready, bulk-save
// DOC_COUNT sample documents across it.
async.times(POOL_SIZE, function(n, callback) {
  couchBase.connect(config, function(err, bucket) {
    if (err) {
      console.log("Unable to connect to Couchbase bucket [" + config.bucket + "]", err);
      process.exit(1);
    }

    console.log("Connected to Couchbase.");
    callback(null, bucket);
  });
}, function(err, result) {
  if (err) {
    console.log("Unable to create Couchbase connection pool.");
    process.exit(1);
  }

  buckets = result;
  console.log("Created a Couchbase pool of [" + buckets.length + "] connections");

  // Build the sample documents; ids are stringified indices.
  var jsonDocs = [];
  for (var i = 0; i < DOC_COUNT; i++) {
    jsonDocs.push({id: i + ""});
  }

  console.log("Saving [" + jsonDocs.length + "] docs");
  // FIX: async.map would dispatch every set() at once, so the "≤50 docs per
  // connection" constraint only held by coincidence of the hard-coded numbers.
  // mapLimit caps total in-flight operations at pool-size * DOCS_PER_CONNECTION,
  // making the per-connection bound explicit regardless of DOC_COUNT.
  async.mapLimit(jsonDocs, buckets.length * DOCS_PER_CONNECTION, function(doc, callback) {
    var bucket = getBucket();

    bucket.set(doc.id, doc, {}, function(err) {
      callback(err);
    });
  }, function(err) {
    if (err) {
      console.log("Unable to save all entries", err);
      process.exit(1);
    }

    console.log("Saved all entries");
    process.exit(0);
  });
});
 
/**
 * Returns the next connection from the pool in round-robin order.
 * Wraps the cursor back to the first bucket once it runs past the end.
 *
 * @returns {Object} a Couchbase bucket connection from the shared pool
 */
function getBucket() {
  var index = current >= buckets.length ? 0 : current;
  current = index + 1;
  return buckets[index];
}

Please sign in to comment on this gist.

Something went wrong with that request. Please try again.