nodejs set timeout on data response - node.js

I am trying to set a timeout in between the writes from my read stream, but I keep getting Error: write after end. How can I set a timeout in between res.write calls? Thank you. Here is my code.
app.get('/video', function (req, res) {
    var readStream = fs.createReadStream('video.mjpeg');
    res.writeHead(200, {'Content-Type': 'multipart/x-mixed-replace;boundary=ThisString'});
    readStream.on("data", (chunk) => {
        res.write(chunk, 'binary');
        setTimeout(function () {
            res.write('ThisString');
        }, 200);
        console.log("Writing to client");
    });
    readStream.on("end", () => {
        readStream.destroy();
        res.end();
        console.log("End of data");
    });
});
I am trying to display the video on the client side; the problem is that the MJPEG gets read too quickly, so the client cannot reconstruct the video. Any help will be appreciated.

I quickly wrote some lines of code for what I understand you need.
StackOverflow is not my favorite IDE, so don't be too hard on it ^^
I don't know readStream very well, but the data event is called many times, right?
The queue waits until the previous chunk and its timeout have finished before writing the next one.
Is this what you need?
var kue = require('kue'),
    jobs = kue.createQueue();

app.get('/video', function (req, res) {
    var readStream = fs.createReadStream('video.mjpeg');
    res.writeHead(200, {'Content-Type': 'multipart/x-mixed-replace;boundary=ThisString'});
    jobs.process('write chunk', function (job, done) {
        setTimeout(function () {
            res.write(job.data.chunk, 'binary');
            done && done();
        }, 200);
    });
    jobs.process('write end', function (job, done) {
        readStream.destroy();
        res.end();
        console.log("End of data");
        done && done();
    });
    readStream.on("data", (chunk) => {
        jobs.create('write chunk', {
            chunk: chunk
        }).save();
    });
    readStream.on("end", () => {
        jobs.create('write end', {}).save();
    });
});
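If you only need to pace the writes and avoid the write-after-end error, a simpler sketch without a job queue (an assumption about what you want, not from the original answer) is to pause the read stream while the timeout is pending and only end the response from the 'end' handler:

app.get('/video', function (req, res) {
    var readStream = fs.createReadStream('video.mjpeg');
    res.writeHead(200, {'Content-Type': 'multipart/x-mixed-replace;boundary=ThisString'});
    readStream.on('data', function (chunk) {
        readStream.pause();                // stop reading until this chunk has been handled
        res.write(chunk, 'binary');
        setTimeout(function () {
            res.write('ThisString');       // boundary marker, kept as in the question
            readStream.resume();           // only now ask for the next chunk
        }, 200);
    });
    readStream.on('end', function () {
        res.end();                         // 'end' only fires after the last chunk was flushed
        console.log('End of data');
    });
});

Because res.end() is only reached once the final timeout has already written its boundary, there is no write after end.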

Related

req.on('end') fired but not req.on('data')

My Node.js req.on('data') does not trigger, but req.on('end') works. I'm learning Node.js and I don't know how to fix it.
My Node.js file:
const http = require('http');
const fs = require('fs');
const url = require('url');

let routes = {
    'GET': {
        '/': (req, res) => {
            fs.readFile('./public/index.html', (err, html) => {
                if (err) throw err;
                res.writeHead(200, {'Content-type': 'text/html'});
                res.write(html);
                res.end();
            });
        }
    },
    'POST': {
        '/api/login': (req, res) => {
            let body = '';
            req.on('data', data => {
                console.log('req data fired!');
                body += data;
            });
            req.on('end', () => {
                console.log('req end fired!');
                res.end();
            });
        }
    }
};

function router(req, res) {
    let baseURL = url.parse(req.url, true);
    let resolveRoute = routes[req.method][baseURL.pathname];
    if (resolveRoute != undefined) {
        resolveRoute(req, res);
    }
}

http.createServer(router).listen(3000, () => console.log('Server running on port 3000'));
How can I get req.on('data') to trigger?
If the data event doesn't fire, but the end event does, then it's because there is no actual body data in the POST request.
With your exact code, the only way I could reproduce what you describe is when the POST body was empty and there was no data there to read. If, for example, you are submitting a form in the browser and there are no elements in the form with a name attribute, no data will be sent with the request.
As soon as I made sure there was some data in the POST request, the data event fired and presented a Buffer with the data in it.
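For example, a quick way to confirm it from Node itself (a sketch, assuming the server above is listening on port 3000; the field names are arbitrary):

var http = require('http');

var body = 'user=test&pass=secret';          // any form fields will do
var request = http.request({
    method: 'POST',
    host: 'localhost',
    port: 3000,
    path: '/api/login',
    headers: {
        'Content-Type': 'application/x-www-form-urlencoded',
        'Content-Length': Buffer.byteLength(body)
    }
}, function (res) {
    res.resume();                            // drain the response
});

request.write(body);                         // this body is what makes 'data' fire server-side
request.end();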

NodeJs Decode to readable text

PROBLEM
I want to receive data from a device, addressed by its IP address, via Node.js. But I received the following data:
What I've Tried
This is the code I have come up with so far, which still produces the problem described above.
var app = require('http').createServer(handler);
var url = require('url');
var statusCode = 200;

app.listen(6565);

function handler (req, res) {
    var data = '';
    req.on('data', function(chunk) {
        data += chunk;
    });
    req.on('end', function() {
        console.log(data.toString());
        fs = require('fs');
        fs.appendFile('helloworld.txt', data.toString(), function (err) {
            if (err) return console.log(err);
        });
    });
    res.writeHead(statusCode, {'Content-Type': 'text/plain'});
    res.end();
}
And below is the result I received for console.log(req.headers)
So my question is: how do I decode the data? And does anyone know what type of data it is?
Use Buffers to handle octet streams.
function handler (req, res) {
    let body = [];
    req.on('data', function(chunk) {
        body.push(chunk);
    });
    req.on('end', function() {
        body = Buffer.concat(body).toString('utf8');
        ...
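If you are not sure what the device actually sends, it can help to dump the raw bytes before committing to an encoding (a sketch building on the handler above; 'utf8' is only an assumption about the device):

req.on('end', function() {
    var buf = Buffer.concat(body);
    console.log(buf.toString('hex'));     // raw bytes, useful for identifying the format
    console.log(buf.toString('utf8'));    // readable only if the device really sends UTF-8 text
});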

nodejs proxy timing and async behavior

I have a question about the code below. I would like confirmation that all the processing done in the proxyRes event handler happens asynchronously and that its processing time won't affect the proxying time. Thank you in advance for your help.
var server = http.createServer(function (req, res) {
    console.time();
    var proxy = httpProxy.createProxyServer();
    proxy.web(req, res, {
        target: 'https://gdev.sserver.be/api/host1/account',
    });
    console.timeEnd();
    proxy.on('proxyRes', function (proxyRes, req, res) {
        //console.log('RAW Response from the target', JSON.stringify(proxyRes.headers, true, 2));
        proxyRes.on('data', function (chunk) {
            console.log('resp => ' + chunk);
            connection.query('INSERT INTO test SET ?', {content: chunk}, function(err, result) {
                if (err) throw err;
                console.log('writing in db');
            });
        });
        proxy.close();
        proxy = null;
    });
}).listen(3000);
Yes, the contents of your proxyRes handler appear to be asynchronous. The proxy will occur while the database query is still doing what it needs to do, so you're right that it won't affect proxying time.
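One way to check this empirically (a sketch reusing the variables from the question, not from the original answer) is to time the response separately from the database write:

var start = Date.now();
var proxy = httpProxy.createProxyServer();
proxy.web(req, res, { target: 'https://gdev.sserver.be/api/host1/account' });

res.on('finish', function () {
    // fires once the proxied response has been fully sent to the client
    console.log('response sent after ' + (Date.now() - start) + ' ms');
});

proxy.on('proxyRes', function (proxyRes) {
    proxyRes.on('data', function (chunk) {
        connection.query('INSERT INTO test SET ?', { content: chunk }, function (err) {
            if (err) throw err;
            // usually logs independently of (and often after) the 'finish' line above
            console.log('db insert done after ' + (Date.now() - start) + ' ms');
        });
    });
});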

How to remove incomplete uploaded file?

I am creating a Node server to upload files using the 'express', 'fs' and 'busboy' modules. The server works as expected, but when I cancel an upload before it completes, the incomplete file remains in the filesystem. How can I remove the incomplete file?
var express = require("express"),
    fs = require("fs"),
    Busboy = require("busboy");

var app = express();
app.listen(7000);

app.get("/", display_form);

app.post("/upload", function(req, res) {
    var busboy = new Busboy({
        headers: req.headers
    });
    busboy.on("file", function(fieldname, file, filename, encoding, mime) {
        var fstream = fs.createWriteStream("./uploads/" + filename);
        file.pipe(fstream);
        file.on("data", function(chunk) {
            console.log(chunk.length);
        });
        file.on("end", function() {
            console.log("end");
        });
        fstream.on("close", function() {
            fstream.close();
            console.log("fstream close");
        });
        fstream.on("error", function() {
            console.log("fstream error");
        });
    });
    busboy.on("finish", function() {
        console.log("uploaded");
        res.send("file uploaded");
    });
    busboy.on("error", function() {
        console.log("error busboy");
    });
    req.pipe(busboy);
});
Thanks for your help; I finally found a way to solve this problem. I added the code snippet below and it works fine.
req.on("close", function(err) {
fstream.end();
fs.unlink('./uploads/' + name);
console.log("req aborted by client");
});
I don't know busboy, but you open a stream and never close it.
Why don't you expose the stream and filename to the 'finish' and 'error' handlers and act accordingly?
Example:
busboy.on('error', function() {
    fs.unlink('./uploads/' + filename);
    console.log('error busboy');
});

Stream buffer to client in Express

I have a request handler that sends a file from MongoDB (GridFS) to the client, shown below, but it uses a data variable, so the whole content is held in memory. I need to do this in streaming mode and send the file to the client in chunks. I can't figure out how to pipe the buffer to the response. Look at the second snippet: it doesn't work, but it shows roughly what I need.
Maybe this is useful: the data in GridFS is Base64 encoded, but that can be changed if streaming would be more efficient.
In-Memory version
router.get('/get/:id', function(req, res){
    getById(req.params.id, function(err, fileId){
        new GridStore(db, fileId, "r").open(function(err, gridStore) {
            res.set('Content-Type', gridStore.contentType);
            var stream = gridStore.stream(true);
            var data = '';
            stream.on("data", function(chunk) {
                data += chunk;
            });
            stream.on("end", function() {
                res.send(new Buffer(data, 'base64'));
            });
        });
    });
});
Streaming mode version
router.get('/get/:id', function(req, res){
    getById(req.params.id, function(err, fileId){
        new GridStore(db, fileId, "r").open(function(err, gridStore) {
            res.set('Content-Type', gridStore.contentType);
            var stream = gridStore.stream(true);
            stream.on("data", function(chunk) {
                new Buffer(chunk, 'base64').pipe(res);
            });
            stream.on("end", function() {
                res.end();
            });
        });
    });
});
Update
I think I'm close to resolving this. I found that this works, but it doesn't decode from Base64:
new GridStore(db, fileId, "r").open(function(err, gridStore) {
    res.set('Content-Type', gridStore.contentType);
    gridStore.stream(true).pipe(res);
});
exports.sendFile = function(db, res, fileId) {
    var grid = require('gridfs-stream');
    var gfs = grid(db, mongoose.mongo);

    var on_error = function() {
        res.status(404).end();
    };

    var readstream = gfs.createReadStream({
        filename: fileId,
        root: 'r'
    });

    readstream.on('error', function(err) {
        if (('\'' + err + '\'') === '\'Error: does not exist\'') {
            return on_error && on_error(err);
        }
        throw err;
    });

    return readstream.pipe(res);
};
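For completeness, a possible way to wire sendFile into a route (hypothetical usage; the module path and the db variable are assumptions, not from the original post):

var sendFile = require('./sendFile').sendFile;   // hypothetical path to the module above

router.get('/get/:id', function (req, res) {
    sendFile(db, res, req.params.id);            // db: the open MongoDB connection used elsewhere
});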
I found a solution, but I think it can be improved. I use the base64-stream module to decode the Base64 stream. Solution below:
var base64 = require('base64-stream');

router.get('/get/:id', function(req, res){
    getById(req.params.id, function(err, fileId){
        new GridStore(db, fileId, "r").open(function(err, gridStore) {
            res.set('Content-Type', gridStore.contentType);
            gridStore.stream(true).pipe(base64.decode()).pipe(res);
        });
    });
});
stream.on("data", function(chunk) {
res.send(chunk.toString('utf8'));
});
