Skip to content

Instantly share code, notes, and snippets.

@dandv
Created May 12, 2013 10:52
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save dandv/5563145 to your computer and use it in GitHub Desktop.
Save dandv/5563145 to your computer and use it in GitHub Desktop.
Why is it so cumbersome to do really simple things in node, like reading records from a file one by one? In Perl, C, PHP, Java, whatever, you'd just run: while ( record = file.fetch() ) { /* do stuff with record */ }
/*jslint node: true*/
'use strict';
var fs = require('fs');
var csv = require('csv');
var async = require('async');
// note, io.listen(<port>) will create a http server for you
//var io = require('socket.io').listen(8901);
//io.sockets.on('connection', function (socket) {
// console.log("On connection");
// Parse an inline 4-row CSV (header row + Mon..Thu) with the legacy node-csv
// stream API; { columns: true } turns each row into an object keyed by the
// header names ('day', 'temp').
var handle = csv().from('day,temp\nMon,20\nTue,22\nWed,25\nThu,23', { columns: true });
// Attempt to bolt a pull-style fetch(howMany) onto the push-based csv stream:
// read up to `howMany` records, then pause by withholding the transform's
// continuation. NOTE(review): every call registers ANOTHER transform on the
// same stream — presumably relying on the csv module replacing it; verify
// against the csv v0.2 docs.
handle.fetch = function(howMany) {
var self = this;
// First call prints `undefined` here — indexThatsNotBroken isn't set yet.
console.error('Starting the fetch at', self.indexThatsNotBroken);
// Coerce undefined -> 0 on first use (undefined | 0 === 0).
self.indexThatsNotBroken |= 0;
var stopAt = self.indexThatsNotBroken + howMany;
self
.transform(
function (record, index, callback) {
console.error(self.indexThatsNotBroken, record);
self.indexThatsNotBroken++;
// BUG (the gist's complaint): once the quota is reached we return
// WITHOUT ever invoking callback(). With {parallel: 1} the pipeline
// waits on that callback forever, so the stream stalls and its 'end'
// event never fires.
if (self.indexThatsNotBroken >= stopAt) { return; }
// Defer the ack one tick so the record is released asynchronously.
process.nextTick(function () {
callback();
});
},
{parallel: 1}
);
// Reached synchronously, before any transform callback has run — the index
// printed here has not advanced yet.
console.error('Stopped the fetch at', self.indexThatsNotBroken);
};
// Drive the demo in four sequential steps; async.series starts each step
// only after the previous one invokes its continuation.
async.series([
  function (done) {
    console.error('Async series call #1');
    handle.fetch(2);
    done(null, 1);
  },
  function (done) {
    // Runs on the next turn, before the transform callbacks have advanced
    // the counter.
    console.error('We are at', handle.indexThatsNotBroken);
    done(null, 1);
  },
  function (done) {
    console.error('Async series call #2');
    handle.fetch(2);
    console.error('We are at', handle.indexThatsNotBroken);
    done(null, 1);
  },
  function (done) {
    console.error('Async series call #3');
    handle.on("end", function () {
      console.error("done"); // doesn't happen
    });
    done(null, 1);
  }
]);
@chacliff
Copy link

chacliff commented Apr 6, 2015

I'm wondering the exact same thing: Node appears to import the entire (potentially large) data set before executing the next portion of the program.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment