I have, let's say, 100,000 records in an array:
var eData = [
  { "id": "1001", "type": "Regular" },
  { "id": "1002", "type": "Chocolate" },
  { "id": "1003", "type": "Blueberry" },
  { "id": "1004", "type": "Devil's Food" }
];
And so on. When I run the Node.js script below:
var db = require('/QOpenSys/QIBM/ProdData/OPS/Node6/os400/db2i/lib/db2a');

var DBname = "*LOCAL";
var dbconn = new db.dbconn();
dbconn.conn(DBname);

// One statement object, shared by every async exec() below
var sqlA = new db.dbstmt(dbconn);

eData.forEach(function(eRow, i) {
  var sql = "INSERT INTO lib.table VALUES( xx x x x) WITH NONE";
  sqlA.exec(sql, function(rs, err) {
    console.log("Execute Done.");
    console.log(err);
  });
});
The data ends up mixed up in the DB: the same id and type will be there 10 times, yet the total hits the exact number of inserted records.
If I change to execSync, everything turns out right, but it seems a bit slow.
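For reference, the synchronous variant looks roughly like this (a minimal sketch; I'm assuming execSync accepts the same (result, error) style callback as exec, and the column placeholders are the same as above):

eData.forEach(function(eRow, i) {
  var sql = "INSERT INTO lib.table VALUES( xx x x x) WITH NONE";
  // execSync blocks until this insert has finished, so statements
  // never overlap on the shared statement object
  sqlA.execSync(sql, function(rs, err) {
    console.log(err);
  });
});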
What am I missing to do async inserts? What is the fastest way to do huge inserts?