I worked with CSV data some years back, and I’ve always been curious about how well Node.js handles CSV file processing on the backend compared with the likes of Java, .NET, Ruby, or PHP environments.
As it turns out, you can accomplish this with very little code, starting by importing the following modules.
let fs = require('fs');
let fastcsv = require('fast-csv');
We use the fast-csv module to handle the CSV data (especially important when it’s a fairly large dataset, so the chosen module has to be performance-driven) while reading it from a file stream input, like so.
// Stream-parse a CSV file and collect every row into `csvData`.
// NOTE(review): `fromStream` is the fast-csv v2 API; v3+ renamed it to
// `parseStream` — confirm the installed version before upgrading.
const readableStreamInput = fs.createReadStream('./some-csv-table.csv');
const csvData = [];

fastcsv
  .fromStream(readableStreamInput, { headers: true })
  .on('error', (error) => {
    // Without this handler, a missing or malformed file emits an
    // unhandled 'error' event and crashes the process.
    console.error('CSV read/parse error', error);
  })
  .on('data', (data) => {
    // With { headers: true } each `data` chunk is already a plain
    // header-keyed object; a shallow copy keeps each stored row
    // independent of the parser's own row object.
    csvData.push(Object.assign({}, data));
  })
  .on('end', () => {
    console.log('csvData', csvData);
    console.log('total rows of table', csvData.length);
  });