Skip to content

Instantly share code, notes, and snippets.

@m00zi
Forked from cowboy/1-file.txt
Created May 31, 2018 12:08
Show Gist options
  • Save m00zi/88defbee3ce19f4468611b50d94557b3 to your computer and use it in GitHub Desktop.
Save m00zi/88defbee3ce19f4468611b50d94557b3 to your computer and use it in GitHub Desktop.
Iterate over all lines in a file, handling extra trailing newlines
foo bar
baz
qux
last line (there may or may not be a trailing newline after this line)
start
0 foo bar
1 baz
2 qux
3
4 last line (there may or may not be a trailing newline after this line)
end
#!/usr/bin/env bash
# A basic "while read line" will skip the last line if there is no
# trailing newline, because "read line" exits with an exit code of 1
# in that case. So we need to be a little more explicit.
#
# Fixes over the original:
#  - eof must hold a runnable command ("false"/"true") for "until $eof"
#    to work: an empty $eof expands to a null command (exit status 0),
#    which ends the until-loop before the body ever runs, and "$eof"
#    set to 1 would run the nonexistent command "1" (status 127).
#  - "IFS= read -r" preserves leading/trailing whitespace and keeps
#    backslashes literal instead of treating them as escapes.
echo "start"
index=0
eof=false
until $eof; do
  IFS= read -r line || eof=true
  echo "$index $line"
  ((index++))
done < file.txt
echo "end"
#!/usr/bin/env node
// Asynchronous file IO: read the whole file in one callback, split on
// newlines, and print each line with its index. A trailing newline in
// the file produces one empty final entry after split("\n"); drop it
// so it isn't printed as a phantom blank line. Any code that depends
// on the file contents must live inside the fs.readFile callback.
var fs = require("fs");
console.log("start");
fs.readFile("file.txt", function(error, data) {
  if (error) { throw error; }
  var lines = data.toString().split("\n");
  // A blank last entry only appears when the file ends with "\n".
  if (lines[lines.length - 1] === "") {
    lines.pop();
  }
  lines.forEach(function(line, index) {
    console.log(index + " " + line);
  });
  console.log("end");
});
#!/usr/bin/env node
// Streaming IO is great, but far more complex than Synchronous or
// Asynchronous IO, unless you use a pre-built third party library.
// Create a read stream, accumulating data chunks. When complete
// lines are available, process them. When the stream ends, process
// remaining lines.
//
// Fix over the original: the stream is opened with an explicit utf8
// encoding so chunks arrive as strings. The original concatenated raw
// Buffer chunks onto a string, stringifying each chunk independently;
// a chunk boundary falling inside a multi-byte UTF-8 character would
// corrupt that character.
var fs = require("fs");
console.log("start");
var index = 0;
// Print each complete line, numbering with the running index.
function withLines(lines) {
  lines.forEach(function(line) {
    console.log(index + " " + line);
    index++;
  });
}
var accumulated = "";
fs.createReadStream("file.txt", { encoding: "utf8" }).on("data", function(data) {
  var parts = (accumulated + data).split("\n");
  // The last entry is an incomplete line (or "" after a newline);
  // hold it back until more data arrives or the stream ends.
  accumulated = parts.pop();
  withLines(parts);
}).on("end", function() {
  // A leftover non-empty tail means the file had no trailing newline.
  if (accumulated.length > 0) {
    withLines(accumulated.split("\n"));
  }
  console.log("end");
});
#!/usr/bin/env node
// Synchronous file IO is about as easy as it gets in Node.js: read the
// file, split on newlines, and print each line with its index. When
// the file ends with a newline, split("\n") yields one empty final
// entry; drop it so no phantom blank line is printed.
var fs = require("fs");
console.log("start");
var lines = fs.readFileSync("file.txt").toString().split("\n");
if (lines[lines.length - 1] === "") {
  lines.pop();
}
for (var i = 0; i < lines.length; i++) {
  console.log(i + " " + lines[i]);
}
console.log("end");
#!/usr/bin/env ruby
# Ruby makes this painless: File.foreach yields every line exactly
# once, including a final line with no trailing newline; chomp strips
# the newline (if any) before printing.
puts "start"
index = 0
File.foreach("file.txt") do |line|
  puts "#{index} #{line.chomp}"
  index += 1
end
puts "end"
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment