I am trying to import data from a JSON file into a MySQL database using Sequelize. I have written the following JavaScript code to do this. It works for small data sets, but when I run it against a large file (containing millions of records) it fails with the errors below.
- JavaScript heap out of memory. I then ran it with node --max-old-space-size=4096 importRecords.js, after which I got a second error:
- Unhandled rejection SequelizeConnectionAcquireTimeoutError: Operation timeout at pool.acquire.catch.error (F:\demo-sequelize\node_modules\sequelize\lib\dialects\abstract\connection-manager.js:282:52)
var Sequelize = require('sequelize');
var JM = require('json-mapper');
const fs = require('fs');

var sequelize = new Sequelize('testdb', 'root', 'root', {
  dialect: 'mysql',
  pool: {
    max: 5,
    min: 0,
    idle: 10000
  },
});

var Profile = sequelize.define('profile', {
  firstName: Sequelize.STRING,
  lastName: Sequelize.STRING,
  email: Sequelize.STRING
});

let rawdata = fs.readFileSync('largeData.json');
let input = JSON.parse(rawdata);

for (let customer of input) {
  //console.log(customer.email);
  Profile.sync({ force: true }).then(function () {
    // Table created
    return Profile.create({
      firstName: customer.firstName,
      lastName: customer.lastName,
      email: customer.email
    });
  });
}
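For the heap error specifically, I suspect that reading and parsing the whole file up front with readFileSync is the problem. Here is a rough sketch of what I think streaming the records would look like, using the third-party stream-json package (an assumption on my part, and assuming largeData.json is one large JSON array):

const fs = require('fs');
const StreamArray = require('stream-json/streamers/StreamArray');

// Stream records one at a time instead of loading the whole
// array into memory with readFileSync + JSON.parse.
const stream = fs.createReadStream('largeData.json')
  .pipe(StreamArray.withParser());

stream.on('data', ({ value: customer }) => {
  // handle one record here, e.g. collect it into a batch
  console.log(customer.email);
});
stream.on('end', () => console.log('all records read'));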
Can anyone suggest how I can achieve this (a rough sketch of what I am considering follows the list):
1. in minimum time, perhaps using asynchronous execution?
2. in an optimal way, by minimizing Sequelize logging during execution?
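This is the direction I am considering for both points: sync the table once, turn off logging, and insert in batches with bulkCreate, awaiting each batch (the batch size of 1000 is just a guess on my part):

const Sequelize = require('sequelize');

const sequelize = new Sequelize('testdb', 'root', 'root', {
  dialect: 'mysql',
  logging: false, // point 2: silence Sequelize's per-query logging
  pool: {
    max: 5,
    min: 0,
    idle: 10000
  }
});

const Profile = sequelize.define('profile', {
  firstName: Sequelize.STRING,
  lastName: Sequelize.STRING,
  email: Sequelize.STRING
});

async function importRecords(customers) {
  await Profile.sync({ force: true }); // create the table once, not per record
  const batchSize = 1000; // guessed value, would need tuning
  for (let i = 0; i < customers.length; i += batchSize) {
    // one multi-row INSERT per batch instead of one query per record,
    // awaited so only one batch holds a pool connection at a time
    await Profile.bulkCreate(customers.slice(i, i + batchSize));
  }
}

Awaiting each bulkCreate keeps at most one batch in flight, which I expect would avoid exhausting the pool of 5 connections. Is this the right approach?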