Node stream hangs when emitting error - node.js

I have a stream that validates a CSV. It works fine, except that when it emits an error it hangs, even after I send the response back.
export function ValidateCSV(options) {
  let opt = options;
  if (!(this instanceof ValidateCSV)) return new ValidateCSV(opt);
  if (!opt) opt = {};
  opt.objectMode = true;
  opt.highWaterMark = 1000000;
  Transform.call(this, opt);
}
util.inherits(ValidateCSV, Transform);
ValidateCSV.prototype._transform = function (chunk, encoding, done) {
  // Do some stuff to the chunk
  // Emit error
  if (required.length > 0) {
    this.emit('error', `The following columns are required: ${required.join(', ')}`);
  }
  done();
};
I was able to fix it by adding a destroy method, but it is still slow and hangs for a few seconds even with it. Is there a better way to end/destroy a Transform stream?
ValidateCSV.prototype.destroy = function () {
  this.readable = false;
  this.writable = false;
  this.emit('end');
};
EDIT:
Here is how I'm using the stream with busboy:
function processMultipart(req, res) {
  const userId = req.query._userId;
  const busboy = new Busboy({ headers: req.headers, limits: { files: 1 } });
  const updateId = req.params.id;

  // Transform stream to validate the csv
  const validateCSV = new ValidateCSV();
  validateCSV
    .on('finish', () => {
      // Process the csv
    })
    .on('error', (er) => {
      // Do some logging
      res.status(500).json(er).end();
    });

  // Multipart upload handler
  busboy
    .on('file', (fieldname, file, filename) => {
      dataset.name = fieldname.length > 0 ?
        fieldname : filename.substr(0, filename.indexOf('.csv'));

      file
        .on('error', (er) => {
          // Send Error
        })
        .on('end', () => {
          // Save dataset to mongo
          if (dataset._update) {
            res.status(200).json(dataset).end();
          } else {
            Dataset.create(dataset, (er) => {
              if (er) {
                res.status(500).json(er).end();
              } else {
                res.status(200).json(dataset).end();
              }
            });
          }
        }).pipe(validateCSV);
    });

  req.pipe(busboy);
}
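For anyone hitting the same hang: a simpler way to fail a Transform stream is to pass an Error to the _transform callback instead of emitting a bare 'error' with a string (on Node 8+, this.destroy(err) does the same job). A minimal sketch of _transform reworked that way, assuming the same required check as above:

ValidateCSV.prototype._transform = function (chunk, encoding, done) {
  // Do some stuff to the chunk
  if (required.length > 0) {
    // Passing an Error to done() emits 'error' and tears the stream
    // down cleanly, so no hand-rolled destroy() is needed.
    return done(new Error(`The following columns are required: ${required.join(', ')}`));
  }
  done();
};

Note that emitting a plain string also means the 'error' handler receives a string rather than an Error object, which is worth fixing regardless.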

Related

"incorrect header check" while trying to uncompress an S3 object body

I compress and upload an object to S3 using the following code:
let data: string | Buffer = JSON.stringify(rules);
let contentType = "application/json";
let encoding = null;
let filename = `redirector-rules.json`;
if (format === "gz") {
  contentType = "application/gzip";
  encoding = "gzip";
  filename = `redirector-rules.gz`;
  const buf = Buffer.from(data, "utf-8");
  data = zlib.gzipSync(buf);
}
// res.end(data);
// return res.status(200).send(data);
await s3.upload(filename, data, contentType, encoding);
I am assuming this works correctly, since when I download the resulting file with the aws s3 cp command it works just fine and I am able to uncompress it on my machine. Additionally (possibly unrelated), if I download it via the S3 console, my system is unable to uncompress it; the file may be corrupt or truncated.
On the other end I have Lambda code that gets the object and attempts to decompress it:
const getRules = async (rulesCommand: GetObjectCommand): Promise<Config> => {
  const resp = await fetchRulesFile(rulesCommand);
  const data = await parseResponse(resp, rulesCommand);
  return data;
};

const fetchRulesFile = async (rulesCommand: GetObjectCommand): Promise<GetObjectCommandOutput> => {
  try {
    console.log(`Retrieving rules file with name ${rulesCommand.input.Key}`);
    const resp = await client.send(rulesCommand);
    return resp;
  } catch (err) {
    throw new Error(`Error retrieving rules file: ${err}`);
  }
};

const parseResponse = async (resp: GetObjectCommandOutput, rulesCommand: GetObjectCommand): Promise<Config> => {
  const { Body } = resp;
  if (!Body) {
    throw new Error("No body in response");
  }
  let data: string = await Body.transformToString();
  if (rulesCommand.input.Key?.endsWith(".gz")) {
    console.log(`Uncompressing rules file with name ${rulesCommand.input.Key}`);
    try {
      data = zlib.gunzipSync(data).toString("utf-8");
    } catch (err) {
      throw new Error(`Error decompressing rules file: ${err}`);
    }
  }
  return JSON.parse(data) as Config;
};
but I keep getting this error: Error: incorrect header check
I resolved the issue by using Readable and streams in the parseResponse function:
const parseResponse = async (
  resp: GetObjectCommandOutput,
  rulesCommand: GetObjectCommand
): Promise<Config> => {
  const { Body } = resp;
  if (!Body) {
    throw new Error("No body in response");
  }
  let data = "";
  const readableStream = new Readable();
  readableStream._read = () => {}; // noop
  // @ts-ignore
  Body.on("data", (chunk: any) => {
    readableStream.push(chunk);
    data += chunk;
  });
  // @ts-ignore
  Body.on("end", () => {
    readableStream.push(null);
  });
  const gunzip = zlib.createGunzip();
  const result = await new Promise((resolve, reject) => {
    let buffer = "";
    readableStream.pipe(gunzip);
    gunzip.on("data", (chunk) => {
      buffer += chunk;
    });
    gunzip.on("end", () => {
      resolve(buffer);
    });
    gunzip.on("error", reject);
  });
  return result as Config;
};
I had to add @ts-ignore at Body.on because of a type mismatch, but it still worked in the compiled JS handler, and fixing it with a type conversion seemed a bit complex.
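For the record, the likely root cause here is that Body.transformToString() decodes the raw gzip bytes as UTF-8, corrupting the binary payload before zlib ever sees it, which is exactly what "incorrect header check" suggests. A shorter fix, assuming a recent AWS SDK v3 where the response body exposes transformToByteArray(), is to keep the payload binary until after gunzip:

const parseResponse = async (resp: GetObjectCommandOutput, rulesCommand: GetObjectCommand): Promise<Config> => {
  const { Body } = resp;
  if (!Body) {
    throw new Error("No body in response");
  }
  // Keep the payload as bytes; only decode to UTF-8 after gunzip.
  const bytes = await Body.transformToByteArray();
  const data = rulesCommand.input.Key?.endsWith(".gz")
    ? zlib.gunzipSync(Buffer.from(bytes)).toString("utf-8")
    : Buffer.from(bytes).toString("utf-8");
  return JSON.parse(data) as Config;
};

This also avoids the @ts-ignore, since no hand-rolled Readable is involved.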

How to work with the response object in Node.js streams, exceljs and worker threads

I am using a worker thread and streams at the same time in a Node.js project. Initially I was not able to pass the res object from the main process to the worker thread. I read many Stack Overflow questions and answers and wrote a solution which works great: I create a Readable stream in the main thread and a Writable stream in the worker thread. With this, I run a large computation over more than 10 tables and export the data, which takes nearly 1 minute of processing.
code:
router.get("/downloadAll", (req, res) => {
new Promise((resolve, reject) => {
const promise = [];
promise.push(Dashboard.DUser());
promise.push(Dashboard.DDUser());
promise.push(Dashboard.DDLUser());
promise.push(Dashboard.Din());
promise.push(Dashboard.Str());
promise.push(Dashboard.R());
promise.push(Dashboard.Q());
Promise.all(promise).catch(err => err)
.then(results => {
const worker = new Worker(`${process.cwd()}/src/route/modules/dashboard/worker.js`, {
workerData: { results }
});
const fileHeaders = [
{
name: "Content-Type",
value: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
}
];
res.setHeader("Content-Disposition", `attachment; filename="Stream.xlsx`);
fileHeaders.forEach(header => res.setHeader(header.name, header.value));
const readStream = new Readable({
read() {}
});
readStream.pipe(res);
worker.on("message", message => {
readStream.push(message);
});
worker.on("exit", code => {
console.log("exit", code);
resolve(true);
//if (code !== 0) reject(new Error(`stopped with ${code} exit code`));
});
});
})
.then(() => res.end())
.catch(err => console.log(err));
});
WORKER THREAD:
const { workerData, parentPort } = require("worker_threads");
const { Writable } = require("stream");
const Excel = require("exceljs");

const writableStream = new Writable();
// writableStream.on("message", () => {});
writableStream._write = (chunk, encoding, next) => {
  parentPort.postMessage(chunk);
  next();
};

const createWorkbook = () => {
  const workbook = new Excel.stream.xlsx.WorkbookWriter({
    stream: writableStream, // stream to server response
    useStyles: true // not sure about this one, check with it turned off.
  });
  workbook.title = "Serious";
  workbook.creator = "SS";
  workbook.created = new Date();
  return workbook;
};

const createSheet = workbook => {
  workerData.results.forEach((result, index) => {
    const worksheet = workbook.addWorksheet(result.title, {
      properties: { outlineLevelCol: 1 }
    });
    worksheet.columns = Object.keys(result.data[0]).map(item => {
      return { header: item, key: item };
    });
    result.data.forEach(row => worksheet.addRow(row).commit());
  });
};

const workbook = createWorkbook();
createSheet(workbook);
workbook.commit();
The above code works fine and is fast for a small calculation. But when the computation is huge, it shows "processing" for about 1 minute before the processing finishes and the xlsx file downloads. So I updated the code to:
router.get("/downloadAll", (req, res) => {
const worker = new Worker(`${process.cwd()}/src/worker/worker.js`);
const fileHeaders = [
{
name: "Content-Type",
value: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
}
];
const today = new Date();
res.setHeader(
"Content-Disposition",
`attachment; filename=Q-${today.getFullYear()}${String(today.getMonth() + 1).padStart(2, "0")}${String(
today.getDate()
).padStart(2, "0")}.xlsx`
);
fileHeaders.forEach(header => res.setHeader(header.name, header.value));
const readStream = new Readable({
read() {}
});
readStream.pipe(res);
worker.on("message", message => {
readStream.push(message);
});
worker.on("exit", code => {
console.log("exit", code);
res.end();
//if (code !== 0) reject(new Error(`stopped with ${code} exit code`));
});
});
and worker thread code:
const { workerData, parentPort } = require("worker_threads");
const { Writable } = require("stream");
const Excel = require("exceljs");
const { resolve } = require("path");
const db = require(`${process.cwd()}/src/modules/db.module`);
const Dashboard = require(`${process.cwd()}/src/route/modules/dashboard.model`);

const promise = [];
promise.push(Dashboard.DUser());
promise.push(Dashboard.DDUser());
promise.push(Dashboard.DDLUser());
promise.push(Dashboard.Din());
promise.push(Dashboard.Str());
promise.push(Dashboard.R());
promise.push(Dashboard.Q());

Promise.all(promise).catch(err => err)
  .then(results => {
    const writableStream = new Writable();
    // writableStream.on("message", () => {});
    writableStream._write = (chunk, encoding, next) => {
      console.log(chunk.toString());
      parentPort.postMessage(chunk);
      next();
    };

    const createWorkbook = () => {
      const workbook = new Excel.stream.xlsx.WorkbookWriter({
        stream: writableStream, // stream to server response
        useStyles: true // not sure about this one, check with it turned off.
      });
      workbook.creator = "ss";
      workbook.created = new Date();
      return workbook;
    };

    const createSheet = workbook => {
      results.forEach((result, index) => {
        // console.log(result);
        const worksheet = workbook.addWorksheet(result.title, {
          properties: { outlineLevelCol: 1 }
        });
        worksheet.columns = Object.keys(result.data[0]).map(item => {
          return { header: item, key: item };
        });
        result.data.forEach(row => worksheet.addRow(row).commit());
      });
    };

    const workbook = createWorkbook();
    createSheet(workbook);
    workbook.commit();
  });
The above code does not work correctly. I can get the data from the promise callback, but when downloading, the browser shows 300kb, 200b, 1 byte, then drops to 0, and the file never downloads.
If I try to run the promise inside createSheet, then I get this error:
Error [ERR_UNHANDLED_ERROR]: Unhandled error. ({ message: 'queue closed', code: 'QUEUECLOSED', data: undefined })
code:
const createSheet = workbook => {
let promise = [];
/**
* get count of all the user list
*/
promise.push(Dashboard.DDPro());
Promise.all(promise)
.then(results => {
results.forEach((result, index) => {
console.log(result);
const worksheet = workbook.addWorksheet(result.title, {
properties: { outlineLevelCol: 1 }
});
worksheet.columns = Object.keys(result.data[0]).map(item => {
return { header: item, key: item };
});
result.data.forEach(row => worksheet.addRow(row).commit);
});
})
.catch(err => console.log(err));
};
Can anybody help me solve this problem?
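A note on the "queue closed" error: WorkbookWriter.commit() closes exceljs's internal write queue, so any addWorksheet/addRow work still pending inside a promise when commit() runs will fail exactly like this. One way out, sketched below on the assumption that the Dashboard calls return the same { title, data } shape as above, is to await all query results first, build every sheet synchronously, and only commit at the end:

const { parentPort } = require("worker_threads");
const { Writable } = require("stream");
const Excel = require("exceljs");
const Dashboard = require(`${process.cwd()}/src/route/modules/dashboard.model`);

const writableStream = new Writable({
  write(chunk, encoding, next) {
    parentPort.postMessage(chunk); // forward each xlsx chunk to the main thread
    next();
  }
});

(async () => {
  // Resolve every query BEFORE touching the workbook.
  const results = await Promise.all([Dashboard.DUser(), Dashboard.DDPro()]);

  const workbook = new Excel.stream.xlsx.WorkbookWriter({
    stream: writableStream,
    useStyles: true
  });

  for (const result of results) {
    const worksheet = workbook.addWorksheet(result.title);
    worksheet.columns = Object.keys(result.data[0]).map(key => ({ header: key, key }));
    result.data.forEach(row => worksheet.addRow(row).commit());
  }

  // Only commit once every sheet and row has been queued; exceljs
  // returns a promise that resolves when the stream has flushed.
  await workbook.commit();
})().catch(err => {
  // Surface errors to the parent instead of failing silently.
  parentPort.postMessage({ error: err.message });
});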

Export CSV from Node.js without crashing

I need to export a CSV for a large amount of data, around 100k rows. I'm using json2csv, but sometimes it takes a long time and crashes. I'm using a Node.js stream, but with no luck. I've been googling, but I have no idea how to fix it. Can anyone guide me on how to fix this?
Node JS
var formatedData = {......} // object with data
let filename = 'test' + '.csv';
let pathName = await writeToCSV(filename, formatedData, fields);
let readStream = fs.createReadStream(pathName);
res.setHeader('Content-disposition', `attachment; filename=${filename}`);
res.set('Content-Type', 'text/csv');
let downloadStream = readStream.pipe(res);
fields = null;
formatedData = null;
downloadStream.on('finish', function() {
  fs.unlink(pathName, function() {});
  downloadStream = null;
  readStream = null;
});
writeToCSV
function writeToCSV(filename, data, fields, option) {
  return new Promise((resolve, reject) => {
    if (typeof data !== 'object') {
      return reject(new Error('Data is not an object'));
    }
    let options = {
      fields
    };
    if (typeof option === 'object') {
      for (let key in option) {
        options[key] = option[key];
      }
    }
    let tmpPath = path.join(__dirname, '..', tmp);
    let pathFile = tmpPath + filename;
    return Promise.all([Promise.resolve(json2csv(data, options).split('\n')), checkTMPExist(tmpPath)]).then(data => {
      let csvFormat = data[0];
      let writeStream = fs.createWriteStream(pathFile);
      csvFormat.forEach((lines, index) => {
        if (index === csvFormat.length - 1) {
          writeStream.end(lines + '\n');
        } else {
          writeStream.write(lines + '\n');
        }
      });
      writeStream.on('finish', function() {
        return resolve(pathFile);
      });
      writeStream.on('error', function(err) {
        this.end();
        fs.unlink(pathFile, () => {});
        return reject(err);
      });
    }).catch(err => {
      fs.unlink(pathFile, () => {});
      return reject(err);
    });
  });
}
Front end Ajax call
function export_csv(url, fileName) {
  $.ajax({
    url: url,
    type: "GET",
    success: function (result) {
      var encodedUri = 'data:application/csv;charset=utf-8,' + encodeURIComponent(result);
      var link = document.createElement("a");
      link.setAttribute("href", encodedUri);
      link.setAttribute("download", fileName);
      document.body.appendChild(link);
      link.click();
    },
    error: function (xhr) {
      console.log("Export csv have some issue:" + JSON.stringify(xhr));
    }
  });
}
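One pattern that avoids both the memory spike and the temp file entirely is to never materialize the whole CSV: stream one row at a time straight into the response. A minimal dependency-free sketch, assuming rows is an array of flat objects and fields lists the column names (json2csv's own Transform stream is the library equivalent):

const { Readable } = require('stream');

function streamCSV(res, rows, fields, filename) {
  res.setHeader('Content-Disposition', `attachment; filename=${filename}`);
  res.setHeader('Content-Type', 'text/csv');

  // Quote any value containing commas, quotes, or newlines.
  const escape = (v) => {
    const s = v === null || v === undefined ? '' : String(v);
    return /[",\n]/.test(s) ? `"${s.replace(/"/g, '""')}"` : s;
  };

  let i = -1; // -1 means the header row has not been sent yet
  const source = new Readable({
    read() {
      if (i === -1) {
        this.push(fields.join(',') + '\n');
      } else if (i < rows.length) {
        // One row per read() call keeps memory flat under backpressure.
        this.push(fields.map(f => escape(rows[i][f])).join(',') + '\n');
      } else {
        this.push(null); // end of stream
      }
      i++;
    }
  });

  source.pipe(res);
}

With this, the browser can also download via a plain link or navigation instead of buffering the entire payload through $.ajax and a data: URI, which is its own memory ceiling.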

Unable to get callback from function

Trying to get the variable random_song from the function functions.random_song
Function:
functions.random_song = () => {
  fs.readFile('auto_playlist.txt', 'utf8', function(err, data) {
    if (err) {
      console.log(`${`ERR`.red} || ${`Error fetching song from auto playlist ${err}`.red}`);
    }
    let songs = data.split('\n');
    songs.splice(-1, 1);
    var random = Math.floor(Math.random()*songs.length);
    let random_song = (songs[random]);
    return random_song;
  })
}
Attempting to get the random song via a callback:
functions.random_song(song => {
console.log(song)
})
The code just returns undefined. Any ideas?
Your function:
functions.random_song = (callback) => {
  fs.readFile('auto_playlist.txt', 'utf8', function(err, data) {
    if (err) {
      // Do stuff
      return callback(err);
    }
    // Do stuff
    callback(null, random_song);
  })
}
And then call it:
functions.random_song((err, song) => {
  // Do stuff
});
You may want to read more about using Promises/async-await instead of callbacks.
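For instance, a minimal async/await sketch of the same function using fs.promises (same file name as in the question):

const fs = require('fs').promises;

functions.random_song = async () => {
  const data = await fs.readFile('auto_playlist.txt', 'utf8');
  const songs = data.split('\n');
  songs.splice(-1, 1); // drop the trailing empty line
  return songs[Math.floor(Math.random() * songs.length)];
};

// Usage:
functions.random_song().then(song => console.log(song));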
functions.random_song = () => {
  let data = fs.readFileSync('auto_playlist.txt', 'utf8');
  let songs = data.split('\n');
  songs.splice(-1, 1);
  var random = Math.floor(Math.random()*songs.length);
  let rand_song = (songs[random]);
  return rand_song;
}
console.log(functions.random_song())
got it working, thanks for the help <3
The following code uses a Promise to handle the file read and data processing asynchronously:
const fs = require('fs')
const functions = {}

functions.random_song = () => {
  return new Promise((resolve, reject) => {
    fs.readFile('auto_playlist.txt', 'utf8', function(err, data) {
      if (err) {
        console.log(err)
        return reject(err)
      }
      let songs = data.split('\n');
      songs.splice(-1, 1);
      var random = Math.floor(Math.random()*songs.length);
      let random_song = (songs[random]);
      resolve(random_song);
    })
  })
}

functions.random_song()
  .then(song => console.log('Song Name', song))
  .catch(err => console.error('Error fetching songs', err))

console.log('It will be executed before promise is resolved')
fs.readFile is asynchronous, so your function returns before the file is read. Use fs.readFileSync instead, like this:
functions.random_song = () => {
  const data = fs.readFileSync('auto_playlist.txt', 'utf8');
  let songs = data.split('\n');
  songs.splice(-1, 1);
  var random = Math.floor(Math.random()*songs.length);
  let random_song = (songs[random]);
  return random_song;
}

How do I run multiple node streams in sequence?

Given two streams, stream1, stream2, how can I run them in sequence, throwing if any fails?
I'm looking for something simpler than this:
stream1.on('end', function() {
  stream2.on('end', done);
});
stream1.on('error', function(error) { done(error); });
stream2.on('error', function(error) { done(error); });
Thanks.
There are a few ways to do that. Check the following link; it shows how to write this kind of code in Node in an elegant way:
Node.js FLOW
Hope it's what you need.
Depending on your use case, you could combine the two streams into one by using the multistream module. Multistreams are constructed from an array of streams:
var MultiStream = require('multistream')
var fs = require('fs')

var streams = [
  fs.createReadStream(__dirname + '/numbers/1.txt'), // contains a single char '1'
  fs.createReadStream(__dirname + '/numbers/2.txt'), // contains a single char '2'
  fs.createReadStream(__dirname + '/numbers/3.txt')  // contains a single char '3'
]

MultiStream(streams).pipe(process.stdout) // => 123
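If creating all the file streams up front is a concern, the multistream README also documents a lazy mode where each array entry is a factory function, so a stream is only opened when its turn comes:

// Lazy variant: each stream is created only when MultiStream reaches it.
var lazyStreams = [
  () => fs.createReadStream(__dirname + '/numbers/1.txt'),
  () => fs.createReadStream(__dirname + '/numbers/2.txt'),
  () => fs.createReadStream(__dirname + '/numbers/3.txt')
]
MultiStream(lazyStreams).pipe(process.stdout) // => 123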
In case combining streams does not fit the use case, you could build the end-event handling on your own:
const fs = require('fs');

var number1 = fs.createReadStream('./numbers1.txt')
  .on('data', d => console.log(d.toString()));
var number2 = fs.createReadStream('./numbers2.txt')
  .on('data', d => console.log(d.toString()));

onEnd([number1, number2], function(err) {
  console.log('Ended with', err);
});

function onEnd(streams, cb) {
  var count = streams.length;
  var ended = 0;
  var errored = null;

  function shouldEnd() {
    ended++;
    if (errored) { return; }
    if (count == ended) {
      cb();
    }
  }

  function endWithError(err) {
    if (errored) { return; }
    errored = true;
    cb(err);
  }

  streams.forEach(s => s
    .on('end', shouldEnd)
    .on('error', endWithError)
  );
}
The onEnd function can be used to wait for an array of streams to end, or to report the first emitted error event.
Try doing it with async functions:
const { createReadStream } = require("fs")

async function main() {
  const stream1 = createReadStream(__dirname + "/1.txt")
  await pipe(stream1, process.stdout)
  const stream2 = createReadStream(__dirname + "/2.txt")
  await pipe(stream2, process.stdout)
  const stream3 = createReadStream(__dirname + "/3.txt")
  await pipe(stream3, process.stdout)
}

async function pipe(tap, sink) {
  return new Promise((resolve, reject) => {
    tap.pipe(sink, { end: false })
    tap.on("end", resolve)
    tap.on("error", reject)
  })
}
Try doing it with a Promise:
function doStream1(cb) {
  // put those operations on stream1 in the callback function...
  cb && cb();
  var p = new Promise(function(resolve, reject) {
    stream1.on('end', resolve);
    stream1.on('error', reject);
  });
  return p;
}

function doStream2(cb) {
  // some operation on stream2 in the callback
  cb && cb();
  var p = new Promise(function(resolve, reject) {
    stream2.on('end', resolve);
    stream2.on('error', reject);
  });
  return p;
}

doStream1(cb).then(function() {
  return doStream2(cb);
}).catch(function(e) {
  // error handling is here
});
Try the code below (the sequenceStream function). The thing I am worried about is error handling, but it should work.
Also, if you use Node < 10.x then you need end-of-stream instead of stream.finished.
const stream = require('stream');
const process = require('process')
const { promisify } = require('util')

const fromList = (lst) => new stream.Readable({
  read() {
    if (lst.length) {
      this.push(String(lst.shift()))
    } else {
      this.push(null)
    }
  }
})

const _finished = promisify(stream.finished)

const sequenceStream = (streams) => {
  const resultingStream = new stream.PassThrough()
  let isNext = Promise.resolve()
  for (const [i, curStream] of streams.entries()) {
    isNext = isNext.then(() => {
      curStream.pipe(resultingStream, { end: i === streams.length - 1 })
      return _finished(curStream)
    }).catch((err) => {
      resultingStream.write(err)
    })
  }
  return resultingStream
}

sequenceStream([
  fromList([1, 2, 3, 4]),
  fromList([5, 6, 7, 8]),
  fromList([9, 10])
]).pipe(process.stdout)
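On modern Node there is also a zero-dependency angle: Readable streams are async iterables, so sequencing becomes a plain loop and any stream error surfaces as a thrown exception. A minimal sketch, reusing the fromList helper from above:

async function concatStreams(streams, sink) {
  for (const s of streams) {
    // for await pulls chunks in order and rethrows stream errors.
    for await (const chunk of s) {
      if (!sink.write(chunk)) {
        // Respect backpressure from the sink.
        await new Promise(resolve => sink.once('drain', resolve))
      }
    }
  }
  // The sink is intentionally left open (like pipe's { end: false });
  // callers can end() it if the destination supports that.
}

concatStreams(
  [fromList([1, 2, 3, 4]), fromList([5, 6, 7, 8]), fromList([9, 10])],
  process.stdout
).catch(err => console.error('Sequence failed:', err))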
