Created
March 28, 2020 20:43
-
-
Save carocad/45e3bdab400c17feb04a27591996f9aa to your computer and use it in GitHub Desktop.
node.js lazy parser
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
const fs = require('fs'); | |
/**
 * Lazily yield the lines of a file one at a time, without loading the
 * whole file into memory. The file is read as a utf8 stream; each chunk
 * is split on '\n' and any trailing partial line is carried over and
 * prefixed to the next chunk.
 *
 * @param {string} filepath - path to the file to read
 * @yields {string} one line at a time (without the trailing newline)
 */
async function* lineSplitter(filepath) {
  const content = fs.createReadStream(filepath, {
    encoding: 'utf8',
  });
  let previous = '';
  for await (const chunk of content) {
    const lines = (previous + chunk).split('\n');
    // The last element is an incomplete line (or '' when the chunk ends
    // with '\n'): keep it for the next iteration instead of yielding it.
    // BUG FIX: the original read lines[lines.length], which is always
    // undefined — it dropped the file's final unterminated line and
    // corrupted later chunks with the string "undefined".
    previous = lines.pop();
    yield* lines;
  }
  // Emit the final line when the file does not end with a newline.
  if (previous) {
    yield previous;
  }
}
/**
 * Demo entry point: stream the CSV file line by line and print each line.
 * Exits 0 on success, 1 on any stream/read error.
 */
async function main() {
  const content = lineSplitter('../resources/stop_times.csv');
  for await (const line of content) {
    console.log(line);
  }
}

main()
  .then(() => process.exit(0))
  // BUG FIX: the original chain had no rejection handler, so a read
  // error (e.g. missing file) surfaced as an unhandled promise rejection
  // instead of a clean non-zero exit.
  .catch((err) => {
    console.error(err);
    process.exit(1);
  });
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
An example of how to use the Node.js async generator API (v10.x) to process big chunks of data lazily