Insert millions of rows from a CSV file into the database in chunks, using the Sequelize ORM's bulkCreate() function
const csv = require("csvtojson");
const { sequelize, user } = require("./db/models");

const filePath = `${__dirname}/file.csv`;

// csvtojson options: skip empty rows, name the five columns, and coerce
// the numeric fields so they match the model's column types.
const csvParseOptions = {
  ignoreEmpty: true,
  headers: ["field1", "field2", "field3", "field4", "field5"],
  colParser: {
    field3: "number",
    field4: "number",
    field5: "number",
  },
};

// Insert `array` into `dbModel` in chunks of `chunkSize` rows, all inside
// the same transaction, so a failure in any chunk rolls back everything.
async function dbBulkCreate(array, dbModel, transaction, chunkSize = 50000) {
  for (let offset = 0; offset < array.length; offset += chunkSize) {
    const arrayChunk = array.slice(offset, offset + chunkSize);
    await dbModel.bulkCreate(arrayChunk, { transaction });
    console.log(`Inserted ${offset + arrayChunk.length} of ${array.length} rows`);
  }
}

async function importCsv() {
  let transaction = null;

  try {
    transaction = await sequelize.transaction();

    // Parse the whole CSV file into an array of row objects.
    const data = await csv(csvParseOptions).fromFile(filePath);

    await dbBulkCreate(data, user, transaction);

    // Every chunk succeeded, so commit once at the end.
    await transaction.commit();
  } catch (error) {
    // Any failed chunk (or parse error) aborts the whole import.
    if (transaction !== null) {
      await transaction.rollback();
    }
    throw error;
  }
}

importCsv().catch(console.error);