I am receiving a large file from an HTTP POST. I need to take each line from this file and insert it into a database. It is much faster to do this by inserting 500 or so lines at a time, so this is what I have:
var readline = require('readline');

var insertSize = 500; // rows per batch insert
var records = [];

var reader = readline.createInterface({
  input: inputStream
});

reader.on('line', function (line) {
  // Pause the stream while this line is handled.
  reader.pause();

  var entry = line.split('\t');
  // Skip lines that do not have exactly three tab-separated fields.
  if (entry.length != 3) return reader.resume();

  records.push({
    first: entry[0],
    second: entry[1],
    third: entry[2]
  });

  // Keep reading until a full batch has accumulated.
  if (records.length < insertSize) return reader.resume();

  // Insert the batch, then empty the buffer and resume reading.
  database.create(records).exec(function (err) {
    if (err) return res.serverError(err);
    records.length = 0;
    reader.resume();
  });
});

reader.on('error', function (err) {
  return res.serverError(err);
});

reader.on('close', function () {
  // Flush whatever is left in the buffer when the stream ends.
  database.create(records).exec(function (err) {
    if (err) return res.serverError(err);
    return res.ok();
  });
});
According to the documentation, the line and close events can still be triggered after pause(). I am seeing this happen: the close event fires before the database.create() in the line handler has finished. Is there a better way to handle this? line-by-line would be a good fit, but it can't take a stream as input. Thanks.
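For what it's worth, the direction I have been sketching (unverified) is to make sure database.create() is never running in two places at once: track whether a batch insert is in flight and whether close has fired, and let whichever finishes last do the final flush. This reuses the same assumed inputStream, database, and res objects as above; the inserting/closed/done flags and the finish/flushFinal helpers are names I made up for the sketch:

var readline = require('readline');

var insertSize = 500;
var records = [];
var inserting = false; // a batch insert is in flight
var closed = false;    // the reader has emitted 'close'
var done = false;      // a response has already been sent

var reader = readline.createInterface({
  input: inputStream
});

// Send exactly one response, whatever order events arrive in.
function finish(err) {
  if (done) return;
  done = true;
  if (err) return res.serverError(err);
  return res.ok();
}

// Flush whatever is still buffered, then respond.
function flushFinal() {
  if (done) return;
  if (!records.length) return finish();
  database.create(records).exec(finish);
}

reader.on('line', function (line) {
  var entry = line.split('\t');
  if (entry.length != 3) return;

  records.push({ first: entry[0], second: entry[1], third: entry[2] });

  // 'line' events emitted after pause() land here and just buffer;
  // the in-flight insert's callback decides what happens next.
  if (inserting || records.length < insertSize) return;

  var batch = records;
  records = [];
  inserting = true;
  reader.pause();

  database.create(batch).exec(function (err) {
    inserting = false;
    if (err) return finish(err);
    // If 'close' fired while this insert was running, do the
    // final flush here instead of in the 'close' handler.
    if (closed) return flushFinal();
    reader.resume();
  });
});

reader.on('error', finish);

reader.on('close', function () {
  closed = true;
  // Defer to the in-flight insert's callback if one is running.
  if (!inserting) flushFinal();
});

The point of the two flags is that the response is sent exactly once and only after both the stream has closed and the last insert has completed, even when close arrives mid-insert. I would still welcome a cleaner approach.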
via Alec Fenichel