
@Mahdhir
Created October 18, 2020 13:29
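
Batch off long-running requests over paginated database rows while capping how many batches run at once: each page is fetched with offset/limit, handed to a worker, and Promise.race pauses the loop whenever the concurrency limit is hit.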
const _ = require('lodash');

// Process rows in batches, keeping at most `options.concurrentBatches`
// long-running requests in flight at any time.
// `model` (an ORM-style model with offset/limit pagination) and
// `doLongRequestForBatch` are assumed to be defined elsewhere.
async function batchRequests(options) {
  const query = { offset: 0, limit: options.limit };
  const promises = [];
  let batch;
  do {
    // Fetch the next page of rows
    batch = await model.findAll(query);
    query.offset += options.limit;
    if (batch.length) {
      const promise = doLongRequestForBatch(batch).then(() => {
        // Once complete, remove this promise from our array
        // so that we know we can add another batch in its place
        _.remove(promises, p => p === promise);
      });
      promises.push(promise);
      // Once we hit our concurrency limit, wait for at least one promise to
      // resolve before continuing to batch off requests
      if (promises.length >= options.concurrentBatches) {
        await Promise.race(promises);
      }
    }
  } while (batch.length);
  // Wait for the remaining in-flight batches to finish
  return Promise.all(promises);
}

batchRequests({ limit: 100, concurrentBatches: 5 }).catch(console.error);
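
For reference, `model` and `doLongRequestForBatch` are not defined in the snippet above and would need to exist before `batchRequests` is called. A minimal sketch of what such stand-ins might look like, assuming an in-memory data source and a simulated slow worker (both are illustrative, not part of the original gist):

// Hypothetical stand-ins for the collaborators the gist assumes.
// Neither is part of the original snippet; the shapes are illustrative only.
const rows = Array.from({ length: 1000 }, (_ignored, i) => ({ id: i }));

// Mimics an ORM model with offset/limit pagination (e.g. a Sequelize-style findAll)
const model = {
  async findAll({ offset, limit }) {
    return rows.slice(offset, offset + limit);
  },
};

// Simulates a slow per-batch operation, e.g. an external API call
function doLongRequestForBatch(batch) {
  return new Promise(resolve => setTimeout(() => resolve(batch.length), 500));
}

Because completed promises are removed from the array as they settle, Promise.race only ever waits on work that is still in flight, so the loop resumes as soon as a slot frees up rather than waiting for the whole batch of five to finish.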