$ node bench-01-pull-stream.js
pull3*100000: 1995.798ms
pull_compose*100000: 1968.901ms
pull_chain*100000: 1871.930ms
$ node bench-02-async-stream.js
async3*100000: 3295.666ms
async3*100000: 3145.636ms
async3*100000: 2943.988ms
$ node bench-03-async-stream-improved.js
async3*100000: 1720.648ms
async3*100000: 1742.325ms
async3*100000: 1617.521ms
- async-streams are composed by default, hence the same benchmark function was repeated three times
- in `bench-03-async-stream-improved` the `fastbench` library was made promise aware, yielding a huge performance boost
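As a rough sketch of what "promise aware" means here (an illustration only, not the fastbench API; the full modified runner appears below with bench-03): the stock runner hands each benchmarked function a `done` callback, while the modified one simply waits on the promise an async function returns.

// sketch: a promise-aware timing loop (illustration, not fastbench itself)
async function timeIt (name, fn, iterations) {
  console.time(name)
  for (let i = 0; i < iterations; i++) {
    await fn() // stock fastbench would instead call fn(done) and wait for the callback
  }
  console.timeEnd(name)
}

// e.g. timeIt('async3*100000', async3, 100000)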
// bench-01-pull-stream.js
const bench = require('fastbench')
const pull = require('../')

const values = [
  JSON.stringify({ hello: 'world' }),
  JSON.stringify({ foo: 'bar' }),
  JSON.stringify({ bin: 'baz' })
]

const run = bench([
  function pull3 (done) {
    const source = pull.values(values)
    const through = pull.asyncMap(function (val, done) {
      const json = JSON.parse(val)
      done(null, json)
    })
    const sink = pull.collect(function (err, array) {
      if (err) return console.error(err)
      setImmediate(done)
    })
    pull(source, through, sink)
  },
  function pull_compose (done) {
    const source = pull.values(values)
    const through = pull.asyncMap(function (val, done) {
      const json = JSON.parse(val)
      done(null, json)
    })
    const sink = pull.collect(function (err, array) {
      if (err) return console.error(err)
      setImmediate(done)
    })
    pull(source, pull(through, sink))
  },
  function pull_chain (done) {
    const source = pull.values(values)
    const through = pull.asyncMap(function (val, done) {
      const json = JSON.parse(val)
      done(null, json)
    })
    const sink = pull.collect(function (err, array) {
      if (err) return console.error(err)
      setImmediate(done)
    })
    pull(pull(source, through), sink)
  }
], 100000)

run()
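For reference when reading the minimal implementations later in the thread: a pull-stream source is just a function `read(abort, cb)` that calls `cb(end, data)`, with `end === true` signalling a clean end. A hand-rolled stand-in for `pull.values` might look roughly like this (a sketch of the contract, not the module's actual code):

// sketch of the pull-stream source contract; not pull.values itself
function valuesSource (items) {
  var i = 0
  return function read (abort, cb) {
    if (abort) return cb(abort)
    if (i >= items.length) return cb(true) // end of stream
    cb(null, items[i++])
  }
}

// it would plug in exactly like pull.values: pull(valuesSource(values), through, sink)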
// bench-02-async-stream.js
const bench = require('fastbench')

const values = [
  JSON.stringify({ hello: 'world' }),
  JSON.stringify({ foo: 'bar' }),
  JSON.stringify({ bin: 'baz' })
]

const run = bench([
  function async3 (done) {
    (async () => {
      let i = 0
      const source = () => async () => values[i++]
      const through = read => async () => {
        const str = await read()
        if (!str) return
        return JSON.parse(str)
      }
      const sink = async read => {
        let data
        while (data = await read()) {}
      }
      const read = through(source())
      await sink(read)
      setImmediate(done)
    })()
  },
  function async3 (done) {
    (async () => {
      let i = 0
      const source = () => async () => values[i++]
      const through = read => async () => {
        const str = await read()
        if (!str) return
        return JSON.parse(str)
      }
      const sink = async read => {
        let data
        while (data = await read()) {}
      }
      const read = through(source())
      await sink(read)
      setImmediate(done)
    })()
  },
  function async3 (done) {
    (async () => {
      let i = 0
      const source = () => async () => values[i++]
      const through = read => async () => {
        const str = await read()
        if (!str) return
        return JSON.parse(str)
      }
      const sink = async read => {
        let data
        while (data = await read()) {}
      }
      const read = through(source())
      await sink(read)
      setImmediate(done)
    })()
  }
], 100000)

run()
// bench-03-async-stream-improved.js
//
// fastbench modification that supports promise wrappers
//
'use strict'

var fastseries = require('fastseries')
var chalk = require('chalk')
var colors = ['red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white', 'gray']
var console = require('console')

function fastbench (functions, opts) {
  var max
  var series = fastseries()
  var currentColor = 0

  if (typeof opts === 'object') {
    max = opts.max || opts.iterations
  } else {
    max = opts
  }

  if (!max) {
    throw new Error('missing number of iterations')
  }

  return run

  function run (done) {
    currentColor = 0
    series(null, bench, functions, done || noop)
  }

  function bench (func, done) {
    var key = func.name + '*' + max
    var count = -1

    // true by default
    if (opts.color !== false) {
      key = chalk[nextColor()](key)
    }

    console.time(key)
    end()

    function end () {
      if (++count < max) {
        // the promise-aware change: wait on the returned promise
        // instead of passing the benched function a callback
        func().then(end)
      } else {
        console.timeEnd(key)
        if (done) {
          done()
        }
      }
    }
  }

  function nextColor () {
    if (currentColor === colors.length) {
      currentColor = 0
    }
    return colors[currentColor++]
  }
}

function noop () {}

//
// benchmark
//
const values = [
  JSON.stringify({ hello: 'world' }),
  JSON.stringify({ foo: 'bar' }),
  JSON.stringify({ bin: 'baz' })
]

const run = fastbench([
  async function async3 () {
    let i = 0
    const source = () => async () => values[i++]
    const through = read => async () => {
      const str = await read()
      if (!str) return
      return JSON.parse(str)
    }
    const sink = async read => {
      let data
      while (data = await read()) {}
    }
    const read = through(source())
    await sink(read)
  },
  async function async3 () {
    let i = 0
    const source = () => async () => values[i++]
    const through = read => async () => {
      const str = await read()
      if (!str) return
      return JSON.parse(str)
    }
    const sink = async read => {
      let data
      while (data = await read()) {}
    }
    const read = through(source())
    await sink(read)
  },
  async function async3 () {
    let i = 0
    const source = () => async () => values[i++]
    const through = read => async () => {
      const str = await read()
      if (!str) return
      return JSON.parse(str)
    }
    const sink = async read => {
      let data
      while (data = await read()) {}
    }
    const read = through(source())
    await sink(read)
  }
], 100000)

run()
I've been running node 9, but even on node 8.9 on my Mac I get this:
pull-streams
pull3*100000: 1971.349ms
pull_compose*100000: 1997.909ms
pull_chain*100000: 1931.397ms
async-streams
async3*100000: 3024.252ms
async3*100000: 3008.779ms
async3*100000: 2976.642ms
async-streams improved
async3*100000: 1772.148ms
async3*100000: 1691.614ms
async3*100000: 1698.988ms
hmm, weird, on node 9 I get this:
pull-streams
pull3*100000: 1629.616ms
pull_compose*100000: 1570.193ms
pull_chain*100000: 1563.351ms
async-streams
async3*100000: 2799.237ms
async3*100000: 2800.672ms
async3*100000: 2771.162ms
async-streams improved
async3*100000: 2007.097ms
async3*100000: 1978.717ms
async3*100000: 1928.227ms
my laptop is faster! (that I wasn't expecting! it cost $250, second-hand!)
what pull-stream version are you using?
oh okay... I just realized something! `pull.drain` seems to be the heavy thing here! swap out `pull.collect` (which depends on `pull.drain`) for the simplest possible:
function collect (cb) {
  var a = []
  return function (read) {
    return read(null, function next (err, data) {
      if (err) cb(null, a)
      else {
        a.push(data)
        read(null, next)
      }
    })
  }
}
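Plugged into the earlier pull3 benchmark, the swap looks roughly like this (a sketch; min_pull3 is a made-up name, and as noted further down, the run that produced these numbers also had asyncMap and values replaced):

// hypothetical min_pull3: same shape as pull3 above, but with the minimal
// collect instead of pull.collect
function min_pull3 (done) {
  const source = pull.values(values)
  const through = pull.asyncMap(function (val, cb) {
    cb(null, JSON.parse(val))
  })
  const sink = collect(function (err, array) {
    setImmediate(done)
  })
  pull(source, through, sink)
}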
and I'm getting results like these:
min_pull3*100000: 683.974ms
min_pull_compose*100000: 761.145ms
min_pull_chain*100000: 640.089ms
💥
Hmm, if `pull.drain` is that slow, that is something we could improve.
It's as complicated as it is because it prevents stack overflows (in the case that a cb is actually sync), which is faster than having a stack overflow... but this (micro)benchmark only has 3 items in the stream, so I'm not very worried about stack overflows; I just wondered what it would be like with the minimal implementations... and it turned out drain was the heaviest. hmm...
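For illustration, the kind of sync-callback protection being described looks roughly like this (a sketch of the pattern, not `pull.drain`'s actual implementation):

// drive the stream with a while loop and only recurse when a callback
// arrives asynchronously, so a fully synchronous source cannot blow the stack
function simpleDrain (op, done) {
  return function sink (read) {
    (function next () {
      var loop = true
      var cbed = false
      while (loop) {
        cbed = false
        read(null, function (end, data) {
          cbed = true
          if (end) {
            loop = false
            if (done) done(end === true ? null : end)
          } else {
            op(data)
            if (!loop) next() // callback was async: restart the loop
          }
        })
        if (!cbed) loop = false // callback is async; it will call next() later
      }
    })()
  }
}

// with a fully synchronous source this processes 1e6 items without growing the stack
var n = 0
simpleDrain(function (data) {}, function (err) { console.log('done', err) })(
  function read (abort, cb) {
    if (abort || n >= 1e6) return cb(true)
    cb(null, n++)
  }
)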
oh sorry, those previous results had `asyncMap` and `values` replaced too, but the difference is ~100 ms
hmm, I get these results on my laptop (ThinkPad X201) on node@8.9