public
Last active

Using continuation passing style with ES6 generators along with node's callback style functions as an alternative to promises for escaping callback hell.

  • Download Gist
continuable-generators.js
JavaScript
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 527 
528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572
"use strict";

// Grab the GeneratorFunction constructor and its prototype chain indirectly,
// since `GeneratorFunction` is not exposed as a global binding.
const GeneratorFunction = function*(){}.constructor;
const GeneratorFunctionPrototype = GeneratorFunction.prototype;
const GeneratorPrototype = GeneratorFunctionPrototype.prototype;

// Cached prototype methods used generically throughout the module.
const slice = Array.prototype.slice;
const concat = Array.prototype.concat;
const toString = Object.prototype.toString;
 
 
/**
 * A user callback function that is raised when the result of an async function
 * is available. Documentation-only stub; never invoked.
 *
 * @param Any error
 *        If truthy indicates an error result. Will be thrown from the
 *        generator.
 * @param Any result
 *        If the callback is successful, will be the yielded value.
 */
function Callback(error, result){}
 
 
/**
 * The type of function that must be yielded from a generator being run.
 * Documentation-only stub; never invoked.
 *
 * @param Callback callback
 *        The function which will handle the error or result of the
 *        continuation.
 */
function Continuation(callback){}
 
 
/**
 * A GeneratorFunction where every value yielded must either itself be a
 * Generator or a Continuation. Documentation-only stub; never invoked.
 *
 * @return Generator
 */
function Continuable(/*...args*/){}
 
 
 
/**
 * A no-operation function, used as a placeholder transform/finalize.
 */
const noop = function(){
  /* intentionally empty */
};
 
 
/**
 * Coerce a value to an object, rejecting null and undefined.
 *
 * @param any value
 *        The value to coerce.
 * @return Object
 * @throws TypeError when value is null or undefined.
 */
const toObject = function(value){
  if (value === null || value === undefined) {
    throw new TypeError('Cannot coerce to object.');
  }
  return Object(value);
};
 
 
/**
 * Coerce a value to an unsigned 32 bit integer (ToUint32 semantics, the same
 * conversion applied to array lengths).
 *
 * @param any value
 *        The value to coerce.
 * @return Number
 */
const toUint32 = function(value){
  const coerced = value >>> 0;
  return coerced;
};
 
 
/**
 * Wrap a function so it is only called the first time the wrapper is called.
 * Subsequent calls return undefined and the wrapped function is released for
 * garbage collection.
 *
 * @param Function fn
 *        The function to wrap.
 * @return Function
 *        The wrapper.
 */
const once = function(fn){
  let pending = fn;
  return function(){
    if (!pending) {
      return;
    }
    const invoke = pending;
    pending = null;
    return invoke.apply(this, arguments);
  };
};
 
 
/**
 * Check whether a given value is a GeneratorFunction, either by direct
 * instanceof or (for cross-realm values) by its constructor's name.
 *
 * @param Any value
 *        The value to check.
 * @return Boolean
 */
const isGeneratorFunction = function(value){
  if (value instanceof GeneratorFunction) {
    return true;
  }
  return typeof value === 'function'
      && !!value.constructor
      && value.constructor.name === 'GeneratorFunction';
};
 
 
/**
 * Check whether a given value is a Generator object (not a generator
 * function), using the brand carried by its toString tag.
 *
 * @param Any value
 *        The value to check.
 * @return Boolean
 */
const isGenerator = function(value){
  const tag = Object.prototype.toString.call(value);
  return tag === '[object Generator]';
};
 
 
// `send` was removed after an early iteration in V8's implementation; fall
// back to `next`, which accepts the resume value in modern engines.
const send = 'send' in GeneratorPrototype ? 'send' : 'next';
 
/**
 * Uses continuation passing style to iterate through each yield in a Generator.
 *
 * @param Generator generator
 *        The generator object to pump.
 * @param Continuation continuation
 *        The continuation which will be passed the callback that dispatches to
 *        the generator.
 * @throws TypeError when a yielded value is not a function.
 */
const pump = function(generator, continuation){
  if (typeof continuation !== 'function') {
    throw new TypeError("Yielded a non-function");
  }

  continuation(function(err, result){
    if (err) {
      // Propagate the error into the generator. If the generator catches it
      // and yields another continuation, keep pumping — previously the value
      // returned by `throw` was discarded, stalling the generator forever.
      const next = generator.throw(err).value;
      next && pump(generator, next);
    } else {
      const next = generator[send](result).value;
      next && pump(generator, next);
    }
  });
};
 
 
/**
 * Handles either a Continuation function or a Generator. If passed a
 * Generator, a nested run() drives it to completion to obtain the final
 * value, which is then delivered through the callback.
 *
 * @param Continuation|Generator value
 *        The thing to pump a single turn.
 * @param Callback cb
 *        The Callback to be passed the error or result.
 * @throws TypeError when value is neither a function nor a Generator.
 */
const resolve = function(value, cb){
  if (typeof value === 'function') {
    return void value(cb);
  }
  if (!isGenerator(value)) {
    throw new TypeError('Must return a Continuation or Continuable');
  }
  run(function*(){
    try {
      cb(null, yield* value);
    } catch (e) {
      cb(e);
    }
  });
};
 
 
/**
 * Creates a generator from a generator function and begins executing it.
 *
 * @param GeneratorFunction generatorFn
 *        The generator function to execute. Every yielded value should be a
 *        Continuation.
 */
const run = exports.run = function run(generatorFn){
  const generator = generatorFn();
  const step = generator.next();
  if (step.done) {
    return;
  }
  pump(generator, step.value);
};
 
 
/**
 * Wrap an async function as a GeneratorFunction so it can be used with
 * delegating yield (yield*).
 *
 * @param Function fn
 *        Function to wrap. This function must accept a Callback as its last
 *        parameter.
 * @return Continuable
 *        Wrapped version of the function that can be yielded to in a
 *        generator executed using `run`.
 */
const wrap = exports.wrap = function wrap(fn){
  return function*(){
    const receiver = this;
    const args = slice.call(arguments);
    // Yield a Continuation that forwards the captured arguments plus the
    // scheduler-provided callback to the underlying async function.
    const continuation = function(cb){
      fn.apply(receiver, args.concat(cb));
    };
    return yield continuation;
  };
};
 
 
// Continuable that resolves after `ms` milliseconds with the actual elapsed
// time (which may exceed `ms` due to timer slop).
const _sleep = wrap(function(ms, cb){
  const start = Date.now();
  setTimeout(function(){
    cb(null, Date.now() - start);
  }, ms);
});
 
/**
 * Helper that can be used to pause execution.
 *
 * @param Number ms
 *        Time to pause, in milliseconds.
 * @return Generator
 *        Continuable that can be yielded to (via yield*) in a generator
 *        executed using `run`; resolves with the actual elapsed time.
 */
exports.sleep = function sleep(ms){
  return _sleep(ms);
};
 
 
// Continuable that defers until the next event loop turn: process.nextTick
// under node, otherwise (browser) a 0ms timeout as the closest fallback.
const _tick = wrap(function(){
  if (typeof process === 'undefined') {
    return function(cb){
      setTimeout(cb, 0);
    };
  }
  return process.nextTick;
}());
 
/**
 * Helper that waits until the next event loop tick.
 *
 * @return Generator
 *        Continuable that can be yielded to (via yield*) in a generator
 *        executed using `run`.
 */
exports.tick = function tick(){
  return _tick();
};
 
 
/**
 * Uses a Continuable function to sequentially visit each value of an array.
 * Unlike `map`, nothing is collected or returned.
 *
 * @param Array array
 *        The set of values to iterate over.
 * @param Continuable itemcb
 *        The function to be applied to each value, as (value, index, array).
 * @param Any [receiver]
 *        The |this| value in the callback.
 */
exports.forEach = function* forEach(array, itemcb, receiver){
  const obj = toObject(array);
  const len = toUint32(obj.length);

  // Skip holes in sparse arrays, mirroring Array.prototype.forEach.
  for (let i = 0; i < len; i++) {
    if (i in obj) {
      yield* itemcb.call(receiver, obj[i], i, array);
    }
  }
};
 
 
/**
* Uses a Continuable function to sequentially map an array of values.
*
* @param Array array
* The set of values to map over.
* @param Continuable itemcb
* The function to be applied to each value.
* @param Any [receiver]
* The |this| value in the callback.
* @return Array
* The mapped array.
*/
exports.map = function* map(array, itemcb, receiver){
const obj = toObject(array);
const len = toUint32(obj.length);
const result = new Array(len);
 
for (let i = 0; i < len; i++) {
if (i in obj) {
result[i] = yield* itemcb.call(receiver, obj[i], i, array);
}
}
 
return result;
};
 
 
/**
* Uses a Continuable function to sequentially filter a set of values.
*
* @param Array array
* The set of values to map over.
* @param Continuable itemcb
* The function to be applied to each value.
* @param Any [receiver]
* The |this| value in the callback.
* @return Array
* The filtered array.
*/
exports.filter = function* filter(array, itemcb, receiver){
const obj = toObject(array);
const len = toUint32(obj.length);
const result = [];
 
for (let i = 0; i < len; i++) {
const item = obj[i];
if (i in obj && (yield* itemcb.call(receiver, item, i, array))) {
result[result.length] = item;
}
}
 
return result;
};
 
 
/**
* Uses a Continuable function to reduce a set of values.
*
* @param Array array
* The set of values to map over.
* @param Continuable itemcb
* The function to be applied to each value.
* @param Any [receiver]
* The |this| value in the callback.
* @return Any
* The final accumulated value.
*/
exports.reduce = function* reduce(array, itemcb, initial){
const obj = toObject(array);
const len = toUint32(obj.length);
let start = arguments.length < 3 ? 1 : 0;
let accum = start ? obj[0] : initial;
 
for (let i = start; i < len; i++) {
if (i in obj) {
accum = yield* itemcb(accum, obj[i], array);
}
}
 
return accum;
};
 
 
/**
* Returns true the first time a Continuable function returns a truthy value
* against a set of values, otherwise returns false.
*
* @param Array array
* The set of values to map over.
* @param Continuable itemcb
* The function to be applied to each value.
* @param Any [receiver]
* The |this| value in the callback.
* @return Boolean
*/
exports.some = function* some(array, itemcb, receiver){
const obj = toObject(array);
const len = toUint32(obj.length);
 
for (let i = 0; i < len; i++) {
if (i in obj && (yield* itemcb.call(receiver, obj[i], i, array))) {
return true;
}
}
 
return false;
};
 
 
/**
* Returns false the first time a Continuable function returns a falsey value
* against a set of values, otherwise returns true.
*
* @param Array array
* The set of values to map over.
* @param Continuable itemcb
* The function to be applied to each value.
* @param Any [receiver]
* The |this| value in the callback.
* @return Boolean
*/
exports.every = function* every(array, itemcb, receiver){
const obj = toObject(array);
const len = toUint32(obj.length);
 
for (let i = 0; i < len; i++) {
if (i in obj && !(yield* itemcb.call(receiver, obj[i], i, array))) {
return false;
}
}
 
return true;
};
 
 
/**
 * Create a parallelized Continuable factory that works over an array of
 * values. Each item's Continuable is resolved concurrently; results are
 * collected positionally and handed to `finalize` once all have settled.
 *
 * @param Function transform
 *        Callback (input, value) that transforms each resolved value before
 *        it is stored in the result array.
 * @param Function finalize
 *        Callback that transforms the completed array as a whole.
 * @return Continuable
 */
const parallelFunction = function(transform, finalize){
  return wrap(function(array, itemcb, receiver, cb){
    const obj = toObject(array);
    const len = toUint32(obj.length);
    const result = new Array(len);

    if (!len) {
      return void cb(null, finalize(result));
    }

    let remaining = len;
    cb = once(cb);

    const handle = function(input, index){
      return function(err, value){
        if (err) {
          cb(err);
        } else if (!(index in result)) {
          // Guard against continuations that call back more than once.
          result[index] = transform(input, value);
          if (!--remaining) {
            cb(null, finalize(result));
          }
        }
      };
    };

    for (let i=0; i < len; i++) {
      if (i in obj) {
        const item = obj[i];
        resolve(itemcb.call(receiver, item, i, array), handle(item, i));
      } else {
        remaining--;
      }
    }

    // BUG FIX: holes decrement `remaining` without scheduling a callback. If
    // the final decrement happened on a hole (e.g. a trailing hole in a
    // sparse array), no handle will ever fire again, so complete here. The
    // `once` wrapper makes this a no-op when cb has already been called.
    if (!remaining) {
      cb(null, finalize(result));
    }
  });
};
 
 
// Discard every per-item result and the final array: forEach is side-effect
// only.
const _parallelForEach = parallelFunction(noop, noop);

/**
 * Uses a Continuable function to visit each value of an array in parallel.
 * Nothing is collected or returned.
 *
 * @param Array array
 *        The set of values to iterate over.
 * @param Continuable itemcb
 *        The function to be applied to each value.
 * @param Any [receiver]
 *        The |this| value in the callback.
 */
exports.parallelForEach = function parallelForEach(array, itemcb, receiver){
  return _parallelForEach(array, itemcb, receiver);
};
 
 
// Keep each item's resolved value as-is; the finished array needs no
// post-processing.
const _parallelMap = parallelFunction(function(input, result){
  return result;
}, function(array){
  return array;
});

/**
 * Uses a Continuable function to map an array of values in parallel.
 *
 * @param Array array
 *        The set of values to map over.
 * @param Continuable itemcb
 *        The function to be applied to each value.
 * @param Any [receiver]
 *        The |this| value in the callback.
 * @return Array
 *        The mapped array.
 */
exports.parallelMap = function parallelMap(array, itemcb, receiver){
  return _parallelMap(array, itemcb, receiver);
};
 
 
// Unique sentinel marking rejected items so they can be stripped afterwards
// without colliding with any legitimate value.
const EMPTY = {};

// Keep the original input when the predicate result is truthy; otherwise mark
// the slot with the sentinel and strip those slots in the finalize pass.
const _parallelFilter = parallelFunction(function(input, result){
  return result ? input : EMPTY;
}, function(array){
  return array.filter(function(item){
    return item !== EMPTY;
  });
});

/**
 * Uses a Continuable function to filter a set of values in parallel.
 *
 * @param Array array
 *        The set of values to filter.
 * @param Continuable itemcb
 *        The predicate applied to each value.
 * @param Any [receiver]
 *        The |this| value in the callback.
 * @return Array
 *        The filtered array.
 */
exports.parallelFilter = function parallelFilter(array, itemcb, receiver){
  return _parallelFilter(array, itemcb, receiver);
};
 
 
/**
 * Continuable that resolves an array of Continuables concurrently and
 * delivers their results, in order, to the callback.
 *
 * @param Array items
 *        Continuations/Continuables to resolve.
 * @param Callback cb
 *        Receives the ordered results, or the first error.
 */
const _join = wrap(function(items, cb){
  const result = [];
  // BUG FIX: `remaining` was declared with `let` *after* its first use in the
  // emptiness check below, which under "use strict" threw a temporal-dead-zone
  // ReferenceError on every call. Declare it before reading it.
  let remaining = items.length;

  if (!remaining) {
    return void cb(null, result);
  }

  cb = once(cb);

  const handle = function(index){
    return function(err, value){
      if (err) {
        cb(err);
      } else if (!(index in result)) {
        // Guard against continuations that call back more than once.
        result[index] = value;
        if (!--remaining) {
          cb(null, result);
        }
      }
    };
  };

  // Iterate by index rather than by item truthiness so a falsy entry fails
  // loudly inside `resolve` instead of silently truncating the join (which
  // would leave `remaining` unreachable and hang forever).
  for (let i = 0; i < items.length; i++) {
    resolve(items[i], handle(i));
  }
});
 
/**
 * Take multiple Continuables and combine them into a single Continuable
 * that returns an array of the completed values, or errors if any error.
 *
 * @param ...Continuable args
 *        Any amount of Continuables or arrays of Continuables to join
 *        (arrays are flattened one level by concat).
 * @return Generator
 *        The combined Continuable which will yield the results as an array.
 */
exports.join = function join(/* ...args */){
  return _join(concat.apply([], arguments));
};
usage.js
JavaScript
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62
"use strict";
 
const path = require('path');
const gen = require('../');
 
/**
 * Build an object whose named methods are Continuable-wrapped versions of the
 * corresponding methods on `obj`.
 *
 * @param Object obj
 *        Source of the callback-style functions.
 * @param Array names
 *        Method names to wrap.
 * @return Object
 */
function wrapAll(obj, names){
  return names.reduce(function(wrapped, name){
    wrapped[name] = gen.wrap(obj[name]);
    return wrapped;
  }, {});
}
 
// fs facade where every listed async method is wrapped as a Continuable, so
// each can be driven with `yield*` inside a generator run by `gen.run`.
const fs = wrapAll(require('fs'), [
'rename', 'ftruncate', 'truncate', 'chown', 'fchown', 'lchown', 'chmod','fchmod',
'lchmod', 'stat', 'lstat', 'fstat', 'link', 'symlink', 'readlink', 'realpath',
'unlink', 'rmdir', 'mkdir', 'readdir', 'close', 'open', 'utimes', 'futimes',
'fsync', 'write', 'read', 'readFile', 'writeFile', 'appendFile'
]);
 
 
 
function* fulldir(dir){
return (yield* fs.readdir(dir)).map(function(child){
return path.resolve(dir, child);
});
}
 
 
/**
 * Stat every entry of a directory, sequentially.
 *
 * @param String dir
 *        Directory to stat.
 * @return Array of fs.Stats objects, one per entry.
 */
function* statdir(dir){
  const paths = yield* fulldir(dir);
  const stats = yield* gen.map(paths, function(filepath){
    return fs.stat(filepath);
  });
  return stats;
}
 
 
/**
 * Sum the sizes of all regular files directly inside a directory.
 *
 * @param String dir
 *        Directory to measure.
 * @return Number total size in bytes.
 */
function* sizedir(dir){
  const stats = yield* statdir(dir);
  let total = 0;
  for (const child of stats) {
    if (child.isFile()) {
      total += child.size;
    }
  }
  return total;
}
 
 
// Continuable that multiplies a value by ten after a short pause, making the
// sequential vs. parallel timing difference below visible.
function* x10(value){
  yield* gen.sleep(20);
  return value * 10;
}
 
// Demo: compare sequential vs. parallel mapping, then join two concurrent
// stats and total the parent directory's file sizes.
gen.run(function*(){
  console.time('sequential');
  console.log(yield* gen.map([1, 2, 3, 4, 5], x10));
  console.timeEnd('sequential');

  console.time('parallel');
  console.log(yield* gen.parallelMap([1, 2, 3, 4, 5], x10));
  console.timeEnd('parallel');

  console.log(yield* gen.join(fs.stat('.'), fs.stat('..')));
  console.log(yield* sizedir('..'));
});

You've really just re-invented promises, but discarded all the nice interoperability and features that make them easier to use.

You in fact have 2 completely different things that are both essentially promises.

  1. A function that takes a callback as its only argument. This is a promise. It is not a Promises/A+ promise. It does not offer any guarantees about when the callback will be called, how many times it will be called etc. etc. but it is a promise.
  2. A generator that has your special semantics.

To demonstrate this, we can convert one of your promises (I'll call them GenPromise) into a Promises/A+ promise using the following:

var promise = require('promise')//from npm or github.com/then/promise
function toPromisesAPlusCompliant(genPromise) {
  return new Promise(function (fulfill, reject) {
    resolve(genPromise, function (err, res) {
      if (err) reject(err)
      else fulfill(res)
    })
  })
}

Even more powerfully, we can make all instances of GenPromise into true Promises/A+ promises simply by extending the prototypes (note that for demonstration purposes I haven't bothered sorting out making these extensions non-enumerable etc.)

Function.prototype.then = then
GeneratorFunction.prototype.then = then
function then(onFulfilled, onRejected) {
  var promise = toPromisesAPlusCompliant(this)
  return promise.then.apply(this, arguments)
}

I can't help but think, why re-invent the wheel and make it worse? Just to be clear, I think there are lots of cool things about this library. I actually like the way you've done things like gen.map etc. I just don't get why you didn't make the entire thing use real promises. Even if it just used Promises/A promises instead of Promises/A+ it would still be way simpler and neater.

The inspiration for making this actually came from http://taskjs.org which uses promises with generators to do something similar. The reason why I thought this was worth making, instead of simply using taskjs, is that it seemed like promises have a lot of overlap with generators in their purpose. A lot of the purpose of promises is to add error handling to async code and to allow for flow control over async code, and generators can accomplish this with better syntax.

You're right in saying:

It does not offer any guarantees about when the callback will be called, how many times it will be called etc. etc. but it is a promise.

That's the nature of continuations. This was actually one of the reasons I decided to make this instead of just using task.js. If you're using promises as a way to keep track of state for a scheduler that's driving a generator then you no longer need those features. Generators can't backtrack and can't restart at arbitrary locations, so a scheduler using promises to manage the execution of a generator (as in task.js) will only ever then a given promise a single time.

A generator that has your special semantics.

This is required for any attempt to implement coroutines on top of generators. In task.js the semantic is that you have to return a promise from any yield. In this library the semantic is that you have to return a continuation or another generator.

Ultimately both have value and can do the same thing. Right now this library is more powerful than task.js because task.js was created for an older version of generators that can't handle delegating yield. But that could be corrected. What's more important is probably what code you're trying to integrate with. If, for example, you're trying to work with node's core APIs, then this library is simpler because it uses callbacks of the same form. If you're using promises then use task.js.

As a retweeter of the original tweet...

I think it's ok to explore new language extensions—both syntactic and API centric—with no regard for existing patterns or pseudo-standards. It's an important part of the complete learning process.

If we treat every new thing as a threat as though its intention is to reinvent an existing thing and reject on this premise, then no progress will ever be made. Why have forEach? It just reinvents the for-loop.

Yes, I was aware that task.js was probably the inspiration.

Generators can't backtrack and can't restart at arbitrary locations

You're half right, they can't backtrack or restart at arbitrary locations, but that doesn't negate the need for the guarantees that promises offer. The guarantee of only executing the callbacks once is perhaps even more important. If you call a function that triggers its continuation twice, it will resolve the next "genPromise" with the wrong value:

function argh(callback) {
  setTimeout(function () {
    callback(null, 2)
    callback(null, 2)
  }, 100)
}
function good(callback) {
  setTimeout(function () {
    callback(null, 40)
  }, 4000)
}
gen.run(function*(){
  var a = argh
  var b = good
  var lifeTheUniverseAndEverything = a + b
  assert(a + b === 42) //error, a+b = 4
});

This is required for any attempt to implement coroutines on top of generators

Sort of, the difference is that you expose that requirement to the users. e.g. gen.map returns a pseudo promise object that you have to handle with yield*. On the other hand, task.js keeps them effectively internal. Yes, the user has to create one, but they don't have to consider them as an entity, they consider the entirety of spawn(function*(){ as one thing, not worrying about the fact that it creates a generator at one point and then consumes it.

Not doing the yield* stuff isn't really a limitation if you already just use promises everywhere, you just do `yield`. It's much nicer to have each thing be encapsulated so it returns some defined thing (a promise) rather than some internal thing (a generator with special semantics).

In addition, as I understand it, task.js will support yield*. I don't know of any environment to test this in, but I'm not aware of yield* requiring anything special to support, i.e. the following should work

function* subroutine() {
  yield getAsync()
  yield getAsync()
  return 5
}

spawn(function*() {
  var x = yield* subroutine()
  assert(x === 5)
})

The final problem is that actually your solution doesn't make it any easier to use with callbacks. Assume that I'm using the promise library Q (a typical promise library) and I want to replace windows style \r\n with unix style \n in a file:

var Q = require('q')
var spawn = require('spawn-library-of-some-sort')

var fs = require('fs')

function promisedMutateFile(filename) {
  return spawn(function* () {
    var content = yield Q.denodeify(fs.readFile)(filename, 'utf8')
    yield Q.denodeify(fs.writeFile)(filename, content.replace(/\r\n/g, '\n'))
  })
}

//or

function callbackMutateFile(filename, callback) {
  return spawn(function* () {
    var content = yield Q.denodeify(fs.readFile)(filename, 'utf8')
    yield Q.denodeify(fs.writeFile)(filename, content.replace(/\r\n/g, '\n'))
  }).denodeify(callback)
}

If I want to use your library:

var gen = requrie('gen-library')
var spawn = require('spawn-library-of-some-sort')

var fs = require('fs')

function* promisedMutateFile(filename) {
  var content = yield* gen.wrap(fs.readFile)(filename, 'utf8')
  yield* gen.wrap(fs.writeFile)(filename, content.replace(/\r\n/g, '\n'))
}

//or

function callbackMutateFile(filename, callback) {
  return gen.run(function* () {
    var content = yield* gen.wrap(fs.readFile)(filename, 'utf8')
    yield* gen.wrap(fs.writeFile)(filename, content.replace(/\r\n/g, '\n'))
  })
  //no way to safely attach callback so that all errors are handled properly
}

I don't see how yours is in any way superior. In either case you are wrapping the callback based API so that you get a promise based API, it's just that in your case you are returning a promise that looks enough like a callback API that you're choosing to fool yourself into thinking it's a callback based API.

As an experiment, try persuading everyone that all async functions should, instead of taking an extra argument, return a function that should be called with that single callback. It would be like asking people to use promises, but without the vast majority of the benefits.

@rwldrn I agree. I'm actually in favor of seeing new experiments and new implementations that challenge the existing wisdom of how promises currently work. For example there's actually a big difference between say Tasks in C# and Q-style promises in JavaScript.

I'd argue that Promises/A+ is by no-means a pseudo-standard. It's come from what is probably the single most open standards body in existence and has the backing of very nearly all JavaScript promise implementers.

I do think it's dangerous to ignore the past though. The key reason we don't have promises as the most popular way of doing asynchronous programming in JavaScript today is almost undoubtedly the early versions of node.js and recent versions of jQuery. Both of these used promises, but failed to take on board some of the most important points about promises.

I also think people create a lot of confusion when they create things that are promises but try and pretend they're not. My concern is that people will not read this very carefully. They will glance at this library and the brief description that precedes it and think: ah so you can do all the same cool generators stuff with callbacks that you could do with promises. At this point they'll dismiss both generators and promises as new things they don't need to learn and will carry on using whatever hacks they've been using in the past.

If the library was up front about it and said:

I've created a library that works like task.js except that it considers a simpler idea for what a promise is. In this library a promise is just a function that takes a callback as its only argument. This is an experimental idea and you should look to Promises/A+ for how most current JavaScript promises behave.

That would be fine. I personally think there are parts of promises I'd rather see changed. For example I'd actually prefer that we didn't force .then handlers to be called asynchronously. It would make it easier to write algorithms that supported being run both synchronously and asynchronously since you could build a promise that was just a wrapper for a value.

I also think there's actually room for the idea that a promise is a class (i.e. it has a prototype we can safely extend) that has a .register method that can be called before or after the promise is resolved or rejected and takes a node style callback as its argument. This would have the potential for somewhat higher performance than current implementations whilst still being possible to extend with the addition of a .then method.

continuations actually seem like they're just normal CPS functions which have been curried

if that's the case then the aspect of promises you're enjoying here is the consistent interface

@ForbesLindesay After some experimenting with this stuff, I realized that continuable style code like this can easily be immune to callbacks being called multiple times as I show in this much simpler example: https://gist.github.com/creationix/5762837#comment-844161

Please sign in to comment on this gist.

Something went wrong with that request. Please try again.