I am using express and node-pg to import excel files into the postgres database
Currently I loop through the excel rows and perform inserts for each row, but I think this is not the right way:
// Read the Excel workbook, then insert each data row into Postgres.
// NOTE(review): this issues one db.query per row; for large imports a
// multi-row INSERT (e.g. pg-promise's helpers.insert) is much faster.
workbook.xlsx.readFile(excel_file).then(function () {
    // Get the first worksheet of the workbook.
    var worksheet = workbook.getWorksheet(1);
    // Loop through all rows of the sheet.
    worksheet.eachRow(function (row, rowNumber) {
        // Commit to DB only from line 2 and up; row 1 holds the headers.
        if (rowNumber > 1) {
            // Loop through all cells and build the parameter array for the query.
            row.eachCell(function (cell, colNumber) {
                arrSQLParams.push(cell.value);
            });
            // Add the user id from the session as the last parameter.
            arrSQLParams.push(user);
            // Insert into DB. (Fixed garbled "func tion" keyword.)
            db.query(strSQL, arrSQLParams, function (err, result) {
                if (err) {
                    console.log(err);
                    ret = false;
                }
            });
            // Reassign a fresh array for the next row's parameters.
            arrSQLParams = [];
        }
    });
});
Is there any better way to improve performance?
After the author provided clarification that up to 1000 records need to be inserted at a time, the solution suggested in Multi-row insert with pg-promise is exactly what the author needs in terms of performance and flexibility.
UPDATE
Must-read article: Data Imports.
</div>
I am using express and node-pg to import excel files into the postgres database
Currently I loop through the excel rows and perform inserts for each row, but I think this is not the right way:
// Read the Excel workbook, then insert each data row into Postgres.
// NOTE(review): this issues one db.query per row; for large imports a
// multi-row INSERT (e.g. pg-promise's helpers.insert) is much faster.
workbook.xlsx.readFile(excel_file).then(function () {
    // Get the first worksheet of the workbook.
    var worksheet = workbook.getWorksheet(1);
    // Loop through all rows of the sheet.
    worksheet.eachRow(function (row, rowNumber) {
        // Commit to DB only from line 2 and up; row 1 holds the headers.
        if (rowNumber > 1) {
            // Loop through all cells and build the parameter array for the query.
            row.eachCell(function (cell, colNumber) {
                arrSQLParams.push(cell.value);
            });
            // Add the user id from the session as the last parameter.
            arrSQLParams.push(user);
            // Insert into DB.
            db.query(strSQL, arrSQLParams, function (err, result) {
                if (err) {
                    console.log(err);
                    ret = false;
                }
            });
            // Reassign a fresh array for the next row's parameters.
            // (Fixed garbled "/ /" comment marker that broke the snippet.)
            arrSQLParams = [];
        }
    });
});
Is there a better way to improve performance?
After the author provided clarification that up to 1000 records need to be inserted at a time, the solution suggested in Multi-row insert with pg-promise is exactly what the author needs in terms of performance and flexibility.
UPDATE
Must-read article: Data Imports.