I am trying to parse a CSV file from S3 in Node.js, running on AWS Lambda. I have tried both the fast-csv and csv-parser npm packages, but with either one the Lambda function fails with Error: write after end. Thanks for the help.
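For context, the snippet below runs inside my handler; bucket, key, params, and callback come from the handler signature and the S3 event record, roughly like this (a simplified sketch, the names may differ slightly from my actual setup):

// Assumed surrounding handler (S3 trigger) providing bucket, key, params, callback
exports.handler = (event, context, callback) => {
  const bucket = event.Records[0].s3.bucket.name;
  const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
  const params = { Bucket: bucket, Key: key };
  // ... the code below runs here ...
};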
// dependencies
const csv = require('fast-csv');
const parse = require('csv-parser');
const aws = require('aws-sdk');
const s3 = new aws.S3({ apiVersion: '2006-03-01' });
const uuidv4 = require('uuid/v4');
// read S3 object stream
var s3Stream = s3.getObject(params, (err, data) => {
  if (err) {
    console.log(err);
    const message = `Error getting object ${key} from bucket ${bucket}. Make sure they exist and your bucket is in the same region as this function.`;
    console.log(message);
    callback(message);
  } else {
    console.log('CONTENT TYPE:', data.ContentType);
    callback(null, data.ContentType);
  }
}).createReadStream();
// read CSV with fast-csv
// options for fast-csv npm
var options = {
  headers: true,
  escape: '\\',
  trim: true,
};
csv.fromStream(s3Stream, options)
  .on("data", function(data) {
    data.id = uuidv4();
    data.createDate = new Date().toISOString();
    console.log(data);
  })
  .on("error", function(data) {
    console.error("Got an error: " + data);
  })
  .on("end", function() {
    console.log("Done reading.");
  });
// read CSV with csv-parser
var otherOptions = {
  columns: true,
  auto_parse: true,
  escape: '\\',
  trim: true,
};
var parser = parse(otherOptions);
parser
  .on('data', function(data) {
    data.id = uuidv4();
    data.createDate = new Date().toISOString();
    console.log(data);
  })
  .on('end', function(data) {
    // do something with csvData
    console.log(data);
  });
s3Stream.pipe(parser);
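For completeness, the most stripped-down variant I can think of trying next is to get the stream without also passing a completion callback to getObject, pipe it into a single parser, and only call the Lambda callback once parsing finishes. This is just a sketch of what I mean, not a confirmed fix:

// Sketch: one stream, one parser, handler callback only after 'end'
const stream = s3.getObject({ Bucket: bucket, Key: key }).createReadStream();
stream
  .pipe(parse({ escape: '\\' }))
  .on('data', (row) => {
    row.id = uuidv4();
    row.createDate = new Date().toISOString();
    console.log(row);
  })
  .on('end', () => callback(null, 'Done reading.'))
  .on('error', (err) => callback(err));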