@Gozala
Created May 11, 2012 01:42
Experimenting with Clojure reducers
// Restore the native reduce in case a previous run of this gist replaced it
// (see the ArrayReduce assignment near the bottom).
Array.prototype.reduce = this.ArrayReduce || Array.prototype.reduce
var console = window.console
var global = this
function assert(actual, expected) {
if (arguments.length < 2 && actual)
return actual
if (actual === expected)
return actual
if (JSON.stringify(actual) == JSON.stringify(expected))
return JSON.stringify(actual)
throw TypeError('Assertion failed')
}
// Exploring Clojure reducibles
// Some helper functions we'll use
function increment(x) { return x + 1 }
function isOdd(n) { return n % 2 }
// Creating a generic `reduce` function
function reduce(f, items, start) {
return items.reduce(f, start)
}
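// (A quick illustrative check: the generic reduce behaves just like
// [].reduce, e.g. summing an array.)
assert(reduce(function(sum, n) { return sum + n }, [ 1, 2, 3 ], 0), 6)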
new function() {
// Very basic approach
// Implement filter using reduce
function filter(f, items) {
return reduce(function(result, item) {
if (f(item)) result.push(item)
return result
}, items, [])
}
assert(filter(isOdd, [ 1, 2, 3, 4 ]), [ 1, 3 ])
// Implement map using reduce
function map(f, items) {
return reduce(function(result, item) {
result.push(f(item))
return result
}, items, [])
}
assert(map(increment, [ 1, 2, 3, 4 ]), [ 2, 3, 4, 5 ])
// You can pipe filter into map
assert(map(increment, filter(isOdd, [ 1, 2, 3, 4 ])), [ 2, 4 ])
}
new function() {
// Now, in the previous examples we were piping results from
// filter into map. This is not very optimal; we could take
// a different approach of composing recipes of reduction
// instead of performing the actual operations.
// Let's define a "reducible" abstraction: an object that
// implements a `reduce` function with the same API as [].reduce
({
reduce: function(f, start) {
// ...
}
})
// Let's define a `reducible` higher-order function for creating
// objects implementing the reducible abstraction
function reducible(reduce) {
return { reduce: reduce }
}
global.reducible = reducible
// Now let's define a function that would allow us to consume
// reducibles.
function array(reducible) {
// Reduce the reducible and collect its
// items into an array, which we return.
return reduce(function(result, item) {
result.push(item)
return result
}, reducible, [])
}
global.array = array
// Arrays fall under our definition of reducibles, so the following
// should work
assert(array([ 1, 2, 3 ]), [ 1, 2, 3 ])
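// (An illustrative sketch: anything exposing a compatible reduce is a
// reducible, e.g. this hypothetical `counter` that yields 1, 2, 3.)
var counter = reducible(function(f, start) {
var result = start
for (var i = 1; i <= 3; i++) result = f(result, i)
return result
})
assert(array(counter), [ 1, 2, 3 ])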
// Now we can define a filter function, which will create a new
// reducible instead of actually performing the reduction.
function filter(fx, items) {
return reducible(function(f, start) {
return reduce(function(result, item) {
if (fx(item)) return f(result, item)
return result
}, items, start)
})
}
// Let's filter the same array once again.
var filtered = filter(isOdd, [ 1, 2, 3, 4 ])
// This time around we won't get an array though.
assert(!Array.isArray(filtered))
// But if we reduce it to an array we will get the expected result
assert(array(filtered), [ 1, 3 ])
// Now let's define a map function in a similar manner:
function map(fx, items) {
return reducible(function(f, start) {
return reduce(function(result, item) {
return f(result, fx(item))
}, items, start)
})
}
// Let's map the same array once again
var mapped = map(increment, [ 1, 2, 3, 4 ])
// This time around we won't get an array either.
assert(!Array.isArray(mapped))
// Although reducing to an array will give us the same result
assert(array(mapped), [ 2, 3, 4, 5 ])
// Finally we can compose new reducibles in a
// similar way as we did with piping
var reduced = map(increment, filter(isOdd, [ 1, 2, 3, 4 ]))
// With the difference that no actions will take place
assert(!Array.isArray(reduced))
// Until we reduce the result
assert(array(reduced), [ 2, 4 ])
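// (An illustrative aside: a reducible can be consumed by any reduction,
// not just `array`, e.g. summing the composed result directly.)
assert(reduce(function(sum, n) { return sum + n }, reduced, 0), 6)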
}
new function() {
// Now, most of the code in the previous filter and map was
// the same boilerplate, so we should be able to abstract
// it away
function reducer(process) {
return function(f, items) {
return reducible(function(next, start) {
return reduce(function(result, item) {
return process(f, next, result, item)
}, items, start)
})
}
}
// Now our filter function is reduced to the actual logic
var filter = reducer(function(f, next, result, item) {
return f(item) ? next(result, item) : result
})
// Let's filter the same array once again.
var filtered = filter(isOdd, [ 1, 2, 3, 4 ])
// This time we'll get a reducible again,
assert(!Array.isArray(filtered))
// which can be reduced to the same result
assert(array(filtered), [ 1, 3 ])
var map = reducer(function(f, next, result, item) {
return next(result, f(item))
})
// Let's map the same array once again
var mapped = map(increment, [ 1, 2, 3, 4 ])
// This time around we won't get an array either.
assert(!Array.isArray(mapped))
// Although reducing to an array will give us the same result
assert(array(mapped), [ 2, 3, 4, 5 ])
// Finally we can compose new reducibles in a
// similar way as we did with piping
var reduced = map(increment, filter(isOdd, [ 1, 2, 3, 4 ]))
// With the difference that no actions will take place
assert(!Array.isArray(reduced))
// Until we reduce the result
assert(array(reduced), [ 2, 4 ])
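// (An illustrative sketch: other transforms fall out of the same helper,
// e.g. a hypothetical `reject` that keeps the items the predicate rejects.)
var reject = reducer(function(f, next, result, item) {
return f(item) ? result : next(result, item)
})
assert(array(reject(isOdd, [ 1, 2, 3, 4 ])), [ 2, 4 ])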
}
new function() {
// Now let's reduce the number of arguments passed to reducers
function reducer(process) {
return function(f, items) {
return reducible(function(next, start) {
return reduce(function(result, item) {
var value = process(f, item)
return value !== undefined ? next(result, value) : result
}, items, start)
})
}
}
global.reducer = reducer
// Now our filter function is reduced to the actual logic
var filter = reducer(function(f, item) {
if (f(item)) return item
})
// Let's filter the same array once again.
var filtered = filter(isOdd, [ 1, 2, 3, 4 ])
// This time we'll get a reducible again,
assert(!Array.isArray(filtered))
// which can be reduced to the same result
assert(array(filtered), [ 1, 3 ])
var map = reducer(function(f, item) {
return f(item)
})
// Let's map the same array once again
var mapped = map(increment, [ 1, 2, 3, 4 ])
// This time around we won't get an array either.
assert(!Array.isArray(mapped))
// Although reducing to an array will give us the same result
assert(array(mapped), [ 2, 3, 4, 5 ])
// Finally we can compose new reducibles in a
// similar way as we did with piping
var reduced = map(increment, filter(isOdd, [ 1, 2, 3, 4 ]))
// With the difference that no actions will take place
assert(!Array.isArray(reduced))
// Until we reduce the result
assert(array(reduced), [ 2, 4 ])
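// (An illustrative sketch of the same convention: returning undefined
// drops an item, e.g. a hypothetical `compact` that strips null and
// undefined. Note this also means a transform can never emit undefined.)
var compact = reducer(function(f, item) {
if (item != null) return item
})
assert(array(compact(null, [ 1, null, 2, undefined, 3 ])), [ 1, 2, 3 ])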
}
new function() {
// Now let's implement a take-like `pick`, for example
function pick(n, items) {
return reducer(function(f, item) {
if (n-- > 0) return item
})(null, items)
}
// This version is not very efficient as it
// will reduce the whole thing even when it
// could stop halfway through it
assert(array(pick(2, [ 2, 3, 4, 5 ])), [ 2, 3 ])
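// (To make the cost visible, an illustrative sketch: a hypothetical `spy`
// reducer counts how many items the underlying reduce actually touches.)
var visited = 0
var spy = reducer(function(f, item) { visited++; return item })
assert(array(pick(2, spy(null, [ 2, 3, 4, 5, 6, 7 ]))), [ 2, 3 ])
// Every item is visited, even though only the first two are needed.
assert(visited, 6)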
}
new function() {
// Let's improve this so we can interrupt reduction
// as necessary. Define a `reduced` function that will
// wrap a result, indicating that it's complete.
function reduced(value) {
return Object.create(reduced.prototype, { value: { value: value } })
}
reduced.is = function is(value) {
return value && value.constructor === reduced
}
// Update our reducer implementation to handle reduction
// interruption.
function reducer(process) {
return function(f, items) {
return reducible(function(next, start) {
return reduce(function(result, item) {
var value = process(f, item, reduced)
var ended = reduced.is(value)
value = ended ? value.value : value
result = value === undefined ? result : next(result, value)
return ended ? reduced(result) : result
}, items, start)
})
}
}
global.reducer = reducer
// Change the array reduce to add support for the reduce
// interrupt.
ArrayReduce = Array.prototype.reduce
Array.prototype.reduce = function(f, start) {
var value, index, result = start
for (index = 0; index < this.length; index++) {
result = f(result, this[index])
if (reduced.is(result)) return result.value
}
return result
}
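// (A quick illustrative check of the patched reduce: collect items
// until the first even one, then stop by returning `reduced`.)
assert([ 1, 3, 4, 5 ].reduce(function(result, item) {
return isOdd(item) ? result.concat(item) : reduced(result)
}, []), [ 1, 3 ])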
// Implement take to take advantage of the interrupt
var take = reducer(function take(f, item, last) {
return f(item) ? item : last()
})
// Implement pick on top of take.
var pick = function(n, items) {
var count = n
return take(function(item) {
return count-- > 0
}, items)
}
assert(array(pick(2, [ 2, 3, 4, 5 ])), [ 2, 3 ])
assert(array(take(isOdd, [ 1, 3, 2, 3, 4 ])), [ 1, 3 ])
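// (Mirror of the earlier counting sketch: with interruption the underlying
// reduce stops as soon as the take is done, instead of walking everything.)
var visited = 0
var spy = reducer(function(f, item) { visited++; return item })
assert(array(pick(2, spy(null, [ 2, 3, 4, 5, 6, 7 ]))), [ 2, 3 ])
// Only three items are visited: the third one triggers the interrupt.
assert(visited, 3)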
}
Gozala commented May 11, 2012

In many ways this seems similar to my initial draft of streamer, which is exciting! Also, I really like how interruption is designed; marking the last desired item makes so much more sense! Now one thing that would be very interesting to learn is whether or not it's possible to represent pull streams via reducibles. Pausing consumption in some way, marking the last item before a pause, may be one way to do that.

Gozala commented May 12, 2012

Some performance comparisons to native arrays: http://jsperf.com/reducibles

Gozala commented May 13, 2012

Ok, so if we leave out interruption, reducibles can go faster than the native implementation:
http://jsperf.com/reducibles/2

Gozala commented May 14, 2012

And the difference is even bigger on larger data sets: http://jsperf.com/reducibles/3
