I have this code to grab a video feed on command, but once started it never stops
async function getBandonBeachesOne(req, res, next) {
  let timer = 0;
  let videoURL =
    "http://dizzyisadog.viewnetcam.com:50000/nphMotionJpeg?Resolution=640x480&Quality=Clarity";
  let videoStream = request
    .get(videoURL)
    .on("data", function (data) {
      timer++;
      if (timer > 30) {
        timer = 0;
        console.log("received " + data.length + " bytes of compressed data");
      }
    })
    .on("end", () => {
      console.log("end");
    })
    .pipe(res);
  req.on("close", () => {
    console.log("Client left");
    videoStream.end("end");
    videoStream.destroy(); // data keeps coming from the request.get(videoURL)
  });
}
This is a request handler: it gets the video feed and pipes it to the client. Once the client leaves, I get the "Client left" message, but no matter what I try, I can't stop the on('data') event from firing forever.
Thanks to this answer here, I found the solution to be:
videoStream.abort()
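For reference, a minimal sketch of how the close handler could look with abort() (this assumes the request package, whose request objects expose an abort() method; note that .pipe() returns the destination stream, so you need to keep a reference to the request itself):

// Sketch only: keep a handle on the upstream request so it can be aborted later
let videoRequest = request.get(videoURL);
videoRequest.pipe(res);

req.on("close", () => {
  console.log("Client left");
  // abort() tears down the upstream HTTP connection, so 'data' stops firing
  videoRequest.abort();
});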
I am trying to send out binary content from a Node.js server. To do this, I allocate a buffer, fill it with my content, and call response.write() on it. Once it returns, I reuse the buffer with new content. However, it doesn't seem to work correctly for some reason.
Here is the server code:
const http = require('http');

async function sendChunk( response, outbuf )
{
    console.log( "Sending buffer: %s", outbuf );

    // Send the buffer out. If it returns false,
    // this means the kernel buffers are full,
    // and we have to wait until they are available.
    if ( await response.write( outbuf ) === false )
    {
        await new Promise(resolve => response.once('drain', ()=>{
            resolve();
        }));
    }
}
async function sendData( response )
{
    let outbuf = Buffer.alloc( 20 );

    for ( let count = 0x45; count < 0x50; count++ )
    {
        for ( let i = 0; i < outbuf.length; i++ )
        {
            outbuf[i] = count;
        }

        await sendChunk( response, outbuf );
    }
}
function webRequestHandler( request, response )
{
    let body = [];

    request.on('error', (err) => {
        console.error(err);
        return;
    });

    request.on('data', (chunk) => {
        body.push(chunk);
    });

    response.on('error', (err) => {
        console.error( "Error sending response: %s", err);
        return;
    });

    // A whole body collected - process it
    request.on('end', async () => {
        // Handle the update; can return an error message
        response.setHeader('Content-Type', 'text/plain');
        await sendData( response );
        response.end();
    });
}

const webserver = http.createServer( webRequestHandler );

// Create the web service
webserver.on('error', function (err) {
    console.log("[" + process.pid + "] " + JSON.stringify(err));
    process.exit();
});

webserver.listen( { "host" : "127.0.0.1", "port" : 5252 }, () => {
    console.log( "Server running" );
});
When tested via curl http://localhost:5252/ the server prints the following:
Sending buffer: EEEEEEEEEEEEEEEEEEEE
Sending buffer: FFFFFFFFFFFFFFFFFFFF
Sending buffer: GGGGGGGGGGGGGGGGGGGG
Sending buffer: HHHHHHHHHHHHHHHHHHHH
Sending buffer: IIIIIIIIIIIIIIIIIIII
Sending buffer: JJJJJJJJJJJJJJJJJJJJ
Sending buffer: KKKKKKKKKKKKKKKKKKKK
Sending buffer: LLLLLLLLLLLLLLLLLLLL
Sending buffer: MMMMMMMMMMMMMMMMMMMM
Sending buffer: NNNNNNNNNNNNNNNNNNNN
Sending buffer: OOOOOOOOOOOOOOOOOOOO
However, the client receives something totally different:
> curl http://localhost:5252/
EEEEEEEEEEEEEEEEEEEEOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO
What's going on here? It does work if I create a new buffer in sendChunk as a copy of outbuf, but that seems like a waste of RAM and doesn't make sense to someone coming from a C background: there, once send() returns on a socket, the data has been copied and you can reuse the source buffer as you wish.
Does Node.js work differently? Do I need to create a dedicated buffer for response.write() which can no longer be touched once write is called, even if write has returned and I waited for the drain event?
I've posted the bug report, and it got closed with an important comment:
You should be passing a callback to .write() to know when node is finished with that chunk of memory instead of relying on the 'drain' event.
Once you make that change, you will see the output on the client that you're expecting.
And indeed, once the sendChunk function is changed as follows:
async function sendChunk( response, outbuf )
{
    return new Promise( function( resolve, reject) {
        if ( response.write( outbuf, ()=>{ resolve(); } ) === false )
        {
            console.log( "Draining buffer" );
            response.once('drain', ()=>{
                resolve();
            });
        }
    });
}
so that the promise resolves only in the write callback, the issue goes away. The core issue here is that response.write is not awaitable and returns before the callback is called.
Should have read the documentation more carefully.
I'm trying to end an IPC stream from my Node app. I can't figure out which character represents end-of-stream, though, and my stream never finishes.
const net = require("net");

const server = net.createServer((stream) => {
  console.log("Server: on connection");

  stream.on("data", (buffer) => {
    console.log("Got data:", buffer.toString());
    stream.write("This message ends here ->.");
  });

  stream.on("end", () => {
    console.log("Stream ended");
    server.close();
  });
});

server.listen("\\\\.\\pipe\\myPipe", () => {
  console.log("Listening...");
});
I have tried adding a null character (\u0000) to the end of my message and checking for it on the receiving end, but that feels a bit hacky. Is there a way to send something like an end-of-stream character?
Thanks!
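For what it's worth, there is no in-band end-of-stream character on a Node socket; the end of the stream is signalled by closing the writable side with end(), which is what fires the server's 'end' handler above. A minimal client sketch for that pipe (hypothetical, just to illustrate the mechanism):

const net = require("net");

const client = net.connect("\\\\.\\pipe\\myPipe", () => {
  client.write("Hello from the client");
  // end() flushes pending data and half-closes the socket;
  // the server's 'end' handler then fires without any sentinel character
  client.end();
});

client.on("data", (buffer) => {
  console.log("Client got:", buffer.toString());
});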
I am trying to extract images from a CSV file by doing the following:
1. Parsing/streaming in a large CSV file using csv-parse and the fs createReadStream method
2. Grabbing each line for processing using stream-transform
3. Extracting the image and other row data for processing using the async waterfall method
4. Downloading and writing the image to the server using request and the fs createWriteStream method
For some reason, after the data gets piped into createWriteStream, there is some case in which an async callback never gets called. I have run this same code using only request, without piping to createWriteStream, and it works. I've also run createWriteStream with a drain event, and then somehow it works? Can anyone explain this to me?
In the code below, request is trying to pipe 14,970 images, but the createWriteStream close or finish events only fire 14,895 times, with error firing 0 times. Could this be a draining issue? Could highWaterMark be exceeded and a write failure be occurring undetected?
Here is my csv line getting code:
var first = true;
var parser = parse();
var transformer = transform( (line, complete) => {
if(!first)
extractData(line,complete)
else {
first = false;
complete(null);
}
},
() => {
console.log('Done: parseFile');
});
fs.createReadStream(this.upload.location).pipe(parser).pipe(transformer);
The extractData function that doesn't always call the required async callback:
extractData(line, complete) {
  var now = new Date();
  var image = {
    createdAt: now,
    updatedAt: now
  };
  async.waterfall([
    next => { // Data Extraction
      async.forEachOf(line, (data, i, complete) => {
        if (i === 2) image.src = data;
        if (i === 3) image.importSrc = data;
        complete(null);
      }, err => {
        if (err) throw err;
        next(null);
      });
    },
    next => { // Download Image
      var file = fs.createWriteStream('public/' + image.src);
      var sendReq = request.get(image.importSrc);

      sendReq.on('response', response => {
        if (response.statusCode !== 200) {
          this.upload.report.image.errors++;
          return next(null);
        }
      });

      sendReq.on('error', err => {
        this.upload.report.image.errors++;
        next(null);
      });

      sendReq.pipe(file);

      file.on('finish', () => {
        this.upload.report.image.inserts++;
        file.close(next); // Close file and callback
      });

      file.on('error', err => {
        this.upload.report.image.errors++;
        next(null);
      });
    }
  ], err => {
    if (err) throw err;
    complete(null);
  });
}
As suggested by @mscdex, I've also tried switching out the finish handler for his replacement close approach.
file.close(next); is unnecessary as the file stream is closed automatically by default. What you can do instead is to listen for the close event to know when the file descriptor for the stream has been closed. So replace the entire finish event handler with:
file.on('close', () => {
  this.upload.report.image.inserts++;
  next(null);
});
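Another way to make sure next() always fires exactly once, whatever fails, is to let the stream machinery propagate errors itself. Here is a sketch of the download step using stream.pipeline (an assumption on my part: it requires Node 10+ and is not part of the original code or answer):

const { pipeline } = require('stream');

next => { // Download Image, alternative sketch
  const file = fs.createWriteStream('public/' + image.src);
  pipeline(request.get(image.importSrc), file, err => {
    if (err) this.upload.report.image.errors++;
    else this.upload.report.image.inserts++;
    next(null); // always reached, so the waterfall never stalls
  });
}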
I have the following code:
app.get('/pull-requests', function (request, response) {
  fetchRepos(fetchPullRequests);

  app.on('pull-requests:fetched', function (pullRequestsByRepo) {
    var html = "";
    _.each(pullRequestsByRepo, function (pullRequests) {
      html += 'There is <strong>' + pullRequests.length + '</strong> pending pull request(s) for <strong>' + pullRequests[0].title + '</strong>:';
      html += '<ul>';
      _.each(pullRequests, function (pullRequest) {
        html += '<li><em>' + pullRequest.title + '</em> (' + pullRequest.url + ')</li>';
      });
      html += '</ul>';
    });
    response.send(html);
  });
});
It works fine once. Every second request ends up raising an error: Can't set headers after they are sent.
EDIT: More code to make the logic explicit
function fetchRepos (callback) {
  _options.path = '/orgs/' + app.get('org') + '/repos?client_id=' + app.get('client_id') + '&client_secret=' + app.get('client_secret');

  // Fetch the list of repos for a given organisation
  var request = https.get(_options, function (res) {
    data = "";
    res.on('data', function (chunk) {
      data += chunk;
    });
    res.on('end', function () {
      var repos = JSON.parse(data);
      return callback(repos);
    });
  });

  request.on('error', function (error) {
    console.log('Problem with request: ' + error);
  });
}
function fetchPullRequests (repos) {
  var pullRequests = [];

  _.each(repos, function (repo, index) {
    _options.path = '/repos/' + app.get('org') + '/' + repo.name + '/pulls?client_id=' + app.get('client_id') + '&client_secret=' + app.get('client_secret');

    var request = https.get(_options, function (res) {
      (function () {
        var data = "";
        res.on('data', function (chunk) {
          data += chunk;
        });
        res.on('end', function () {
          data = JSON.parse(data);
          if (data.length > 0) {
            pullRequests.push(data);
          }
          if (index == (repos.length - 1)) {
            app.emit('pull-requests:fetched', pullRequests);
          }
        });
      })();
    });
  });
}
Your problem is that whenever you call app.on('pull-requests:fetched', …), you add a new listener, meaning that when the second request arrives, it triggers the first one's handler again.
Node then complains because you try to reply twice to the first request.
You could fix the immediate issue by calling app.once, which would ensure that the listener only fires once, but you would still have a problem if two requests arrive at the same time.
The correct pattern in this case is to pass a callback to fetchRepos.
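A sketch of what that could look like (the exact signatures are hypothetical; fetchRepos and fetchPullRequests would both need to accept a callback instead of emitting on app, and buildHtml stands in for the HTML-building loop shown above):

app.get('/pull-requests', function (request, response) {
  // Each request gets its own callback chain, so no app-level listener accumulates
  fetchRepos(function (repos) {
    fetchPullRequests(repos, function (pullRequestsByRepo) {
      var html = buildHtml(pullRequestsByRepo); // hypothetical helper wrapping the loop above
      response.send(html);
    });
  });
});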
I am using the following Node.js code to download documents from some URL and save them to disk.
I want to be informed when the document has been downloaded. I have not seen any callback with pipe. Or is there an 'end' event that can be captured on completion of the download?
request(some_url_doc).pipe(fs.createWriteStream('xyz.doc'));
Streams are EventEmitters, so you can listen for certain events. As you said, there is a finish event on the write stream (previously end).
var stream = request(...).pipe(...);
stream.on('finish', function () { ... });
For more information about which events are available you can check the stream documentation page.
Based on the Node.js documentation, http://nodejs.org/api/stream.html#stream_event_finish,
you should handle the writable stream's finish event.
var writable = getWriteable();
var readable = getReadable();
readable.pipe(writable);
writable.on('finish', function(){ ... });
Here is a code snippet for piping content from the web via http(s) to the filesystem. As @starbeamrainbowlabs noticed, the finish event does the job.
var fs = require('fs');
var http = require('http');
var https = require('https');

var tmpFile = "/tmp/somefilename.doc";
var ws = fs.createWriteStream(tmpFile);

ws.on('finish', function() {
  // pipe done here, do something with file
});

// url is the source URL string
var client = url.slice(0, 5) === 'https' ? https : http;
client.get(url, function(response) {
  return response.pipe(ws);
});
I found a slightly different solution to my problem in this context, which seems worth sharing.
Most of the examples create read streams from a file, but in my case the read stream has to be created from a JSON string coming from a message pool.
var through2 = require('through2');
// sftp is assumed to be an already-connected SFTP session (e.g. from the ssh2 module)

var jsonStream = through2.obj(function(chunk, encoding, callback) {
  this.push(JSON.stringify(chunk, null, 4) + '\n');
  callback();
});

// message.value --> value/text to write in write.txt
jsonStream.write(JSON.parse(message.value));

var writeStream = sftp.createWriteStream("/path/to/write/write.txt");

// "close" event didn't work for me!
writeStream.on('close', function () {
  console.log("- done!");
  sftp.end();
});

// "finish" event didn't work for me either!
writeStream.on('finish', function () {
  console.log("- done!");
  sftp.end();
});

// finally this worked for me!
jsonStream.on('data', function(data) {
  var toString = Object.prototype.toString.call(data);
  console.log('type of data:', toString);
  console.log("- file transferred");
});

jsonStream.pipe(writeStream);
Here's a solution that handles errors in requests and calls a callback after the file is written:
request(opts)
  .on('error', function (err) { return callback(err); })
  .pipe(fs.createWriteStream(filename))
  .on('finish', function () {
    // 'finish' is emitted with no arguments once the file has been fully written
    return callback();
  });