Proper way to pipe buffers in Node.js

Take a look:
var Client = require('ftp');
var fs = require('fs');

var c = new Client();
c.on('ready', function() {
  c.get('foo.txt', function(err, stream) {
    if (err) throw err;
    stream.once('close', function() { c.end(); });
    stream.pipe(fs.createWriteStream('foo.local-copy.txt'));
  });
});
// connect to localhost:21 as anonymous
c.connect();
This piece of code is from https://www.npmjs.org/package/ftp. Basically it opens a read stream and pipes it into a write stream. At the end it closes the connection to the source.
Does the pipe method close the target stream after the piped (source) stream is closed? I couldn't find this in the API documentation.
I ran some tests from which I can conclude that it does, but I am not sure.
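For reference, a minimal sketch of such a test (placeholder file names; source.txt is assumed to exist): listen for the finish event on the destination.
var fs = require('fs');

var source = fs.createReadStream('source.txt');  // assumed to exist
var target = fs.createWriteStream('target.txt');

// 'finish' only fires after end() has been called on the writable,
// so seeing it confirms that pipe() ended the destination.
target.on('finish', function() {
  console.log('pipe() called end() on the target');
});

source.pipe(target);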

The destination stream is closed when the source emits an end event. This is documented in Stream.pipe:
By default end() is called on the destination when the source stream emits end, so that destination is no longer writable.
This allows calls of the form:
var http = require('http'),
    fs = require('fs');

http.createServer(function (req, res) {
  fs.createReadStream('path/to/file').pipe(res);
}).listen(3000);
If end() weren't called on the response object, the request would time out. For example, passing {end: false} would make the request time out:
fs.createReadStream('path/to/file').pipe(res, {end: false});
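The {end: false} option is useful when you intend to keep writing to the destination after the source finishes; you then have to call end() yourself. A sketch (the trailer is just an illustration):
var http = require('http'),
    fs = require('fs');

http.createServer(function (req, res) {
  var stream = fs.createReadStream('path/to/file');
  // Keep the response open after the file has been piped...
  stream.pipe(res, {end: false});
  // ...so we can append a trailer, then end the response ourselves.
  stream.on('end', function () {
    res.end('\n-- end of file --\n');
  });
}).listen(3000);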

Related

When should I use 'open' event in Node.js?

I'm learning Node.js, and while working through the events and streams topics I encountered this example:
const http = require('http');
const fs = require('fs');

http
  .createServer((req, res) => {
    const fileStream = fs.createReadStream('./some-data.txt', 'utf8');
    fileStream.on('open', () => {
      fileStream.pipe(res);
    });
    fileStream.on('error', (err) => {
      res.end(err);
    });
  })
  .listen(5000);
While I understand the logic behind this code (the server transfers some big data in chunks with the help of streams), I don't understand the benefit of using the 'open' event.
The same code, where I just pipe data from the read stream into the write one (the res object) right after creating fileStream, without listening to 'open', produces exactly the same result. What is the difference, and in which cases should I listen to 'open'?
const http = require('http');
const fs = require('fs');

http
  .createServer((req, res) => {
    const fileStream = fs.createReadStream('./some-data.txt', 'utf8');
    fileStream.pipe(res);
    fileStream.on('error', (err) => {
      res.end(err);
    });
  })
  .listen(5000);
In this particular case it does not really make any difference whether you use the open event or not. If the file does not exist, the error event will be emitted either way, and you would send it via res.end(err.message) (note that res.end(err) fails because err is an object, not a string or Buffer).
The open event simply tells us that the stream has opened, so that we can react to that in our code.
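One situation where 'open' is genuinely useful (a sketch going beyond the original question): deferring the response headers until you know the file can actually be opened, so a missing file can still produce a clean 404 instead of a half-written 200.
const http = require('http');
const fs = require('fs');

http
  .createServer((req, res) => {
    const fileStream = fs.createReadStream('./some-data.txt', 'utf8');
    fileStream.on('open', () => {
      // The file descriptor exists, so it is safe to commit to a 200 now.
      res.writeHead(200, { 'Content-Type': 'text/plain' });
      fileStream.pipe(res);
    });
    fileStream.on('error', () => {
      if (!res.headersSent) {
        // 'open' never fired: no headers sent yet, so we can still 404.
        res.writeHead(404);
        res.end('Not found');
      } else {
        res.destroy(); // reading failed mid-stream; abort the response
      }
    });
  })
  .listen(5000);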

Why is it necessary to invoke close after finish in a Node.js stream?

Here is a source:
var http = require('http');
var fs = require('fs');

var download = function(url, dest, cb) {
  var file = fs.createWriteStream(dest);
  var request = http.get(url, function(response) {
    response.pipe(file);
    file.on('finish', function() {
      file.close(cb); // close() is async; call cb after close completes.
    });
  }).on('error', function(err) { // Handle errors
    fs.unlink(dest); // Delete the file async. (But we don't check the result.)
    if (cb) cb(err.message);
  });
};
Why is file.close(cb) called on the finish event? Doesn't finish already close the stream? It looks strange.
Before Node.js version 5.5.0 it was the application's responsibility to close the file descriptor and make sure there was no file descriptor leak.
In v5.5.0 the autoClose option was introduced for fs.createWriteStream, with a default value of true, which means that on error or finish the file descriptor is closed automatically.
The source you're looking at predates v5.5.0 (released 2016-01-21).
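On modern Node you can rely on autoClose (the default) and let stream.pipeline (available since Node 10) handle cleanup and error propagation. A sketch of the same helper under those assumptions:
const http = require('http');
const fs = require('fs');
const { pipeline } = require('stream');

const download = (url, dest, cb) => {
  http.get(url, (response) => {
    // pipeline() forwards errors from either stream, destroys both
    // on failure, and autoClose releases the file descriptor.
    pipeline(response, fs.createWriteStream(dest), cb);
  }).on('error', cb);
};
Unlike the original, this sketch does not unlink a partially written file on error.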

Send file over tcp socket via proxy with streams

What I am trying to achieve with node.js/io.js is to send a file from one server to another via a proxy. To avoid buffering in memory I want to use streams.
The proxy should be able to connect to multiple targets dynamically. The target connection information should be sent to the proxy prior to the file data.
With normal socket communication and buffering this is not a problem, but how, in general, can this be done with streams?
var net = require('net');
var fs = require('fs');

// create read stream from file
var myFile = fs.createReadStream('E:/sample.tar.gz');

// Proxy server
//####################################################################################################
var proxy = net.createServer(function (socket) {
  // Create a new connection to the TCP server
  var client = net.connect('9010');
  // 2-way pipe between client and TCP server
  socket.pipe(client).pipe(socket);
}).listen(9000);

// Target server
//####################################################################################################
var server = net.createServer(function (socket) {
  // create file stream to write data into file
  var destfile = fs.createWriteStream('E:/sample_copy.tar.gz');
  socket.on('data', function (buffer) {
    console.log('Got data on target server...');
    // write buffer to file
    destfile.write(buffer);
  });
  socket.on('end', function () {
    // release file from write stream
    destfile.end();
  });
}).listen(9010);

// Client
//####################################################################################################
// Send file to proxy
var client = new net.Socket();

// connect to proxy
client.connect('9000', '127.0.0.1', function () {
  console.log('Connection to proxy opened');
});

// send data to proxy
myFile.pipe(client);

// read response from target
client.on('data', function (data) {
  console.log('Response: ' + data);
  // close the client socket completely
  client.destroy();
});

// Add a 'close' event handler for the client socket
client.on('close', function () {
  console.log('Connection to proxy closed');
});
Any hint to a good tutorial is also welcome.
socket.write() already uses streams under the hood so you don't need to do anything special. Just send it the usual Buffer object or string and it will use a stream.
From the current source code of io.js, here's what happens when you use socket.write():
Socket.prototype.write = function(chunk, encoding, cb) {
  if (typeof chunk !== 'string' && !(chunk instanceof Buffer))
    throw new TypeError('invalid data');
  return stream.Duplex.prototype.write.apply(this, arguments);
};
And stream is declared like this:
const stream = require('stream');
Apologies if I've misunderstood your question or requirements! By all means clarify if I have, and I'll try again (or delete this answer so it's not a distraction).
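To address the part about sending connection information before the file data (this goes beyond the answer above and assumes a simple ad-hoc protocol): one common pattern is a single newline-terminated JSON header, which the proxy consumes before switching to plain piping. A sketch of the proxy side:
var net = require('net');

// Assumed protocol: the client first sends one line such as
// '{"host":"127.0.0.1","port":9010}\n', then the raw file bytes.
var proxy = net.createServer(function (socket) {
  var buffered = Buffer.alloc(0);

  function onData(chunk) {
    buffered = Buffer.concat([buffered, chunk]);
    var nl = buffered.indexOf(0x0a); // '\n'
    if (nl === -1) return; // header not complete yet

    socket.removeListener('data', onData);
    socket.pause(); // don't lose bytes while connecting to the target

    var header = JSON.parse(buffered.slice(0, nl).toString('utf8'));
    var rest = buffered.slice(nl + 1); // file bytes that arrived with the header

    var target = net.connect(header.port, header.host, function () {
      if (rest.length) target.write(rest);
      // From here on everything is streamed untouched in both directions;
      // pipe() resumes the paused socket.
      socket.pipe(target).pipe(socket);
    });
  }

  socket.on('data', onData);
}).listen(9000);
The client would then write the header line first, e.g. client.write(JSON.stringify({host: '127.0.0.1', port: 9010}) + '\n'), before calling myFile.pipe(client).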

Node pipe stops working

My client sends an image file to the server. It works 5 times and then it suddenly stops. I am pretty new to using streams and pipe, so I am not sure what I am doing wrong.
Server Code
var http = require('http');
var fs = require('fs');
var i = 0;

http.createServer(function(req, res) {
  console.log("File received");
  // This opens up the writable stream to `output`
  var name = "./test" + i + ".jpg";
  var writeStream = fs.createWriteStream(name);
  // This pipes the POST data to the file
  req.pipe(writeStream);
  req.on('end', function () {
    console.log("File saved");
    i++;
  });
  // This is here in case any errors occur
  writeStream.on('error', function (err) {
    console.log(err);
  });
}).listen(3000);
Client code
var request = require('request');
var fs = require('fs');
setInterval(function () {
var readStream = fs.createReadStream('./test.jpg');
readStream.on('open', function () {
// This just pipes the read stream to the response object (which goes to the client)
readStream.pipe(request.post('http://192.168.1.100:3000/test'));
console.log("Send file to server");
});
}, 1000);
This behaves like a resource exhaustion issue. It's not clear which calls throw errors and which just return. Does the server accept the connection on the 6th call? Does the write stream open? Does the pipe open?
Try ending the connection and closing the pipe after the image is saved. Maybe close the write stream too; I don't remember whether Node garbage collects file descriptors.
I had to do the following on the server side to make this work:
res.statusCode = 200;
res.end();
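For context, a sketch of where those two lines belong (this assumes the rest of the handler; older Node HTTP agents defaulted to 5 concurrent sockets per host, which would explain exactly 5 successful uploads before the client stalls waiting for responses):
var http = require('http');
var fs = require('fs');
var i = 0;

http.createServer(function (req, res) {
  var writeStream = fs.createWriteStream('./test' + i + '.jpg');
  req.pipe(writeStream);
  writeStream.on('error', console.log);
  req.on('end', function () {
    i++;
    // Answer the client so the request completes and its socket
    // is returned to the agent's pool.
    res.statusCode = 200;
    res.end();
  });
}).listen(3000);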

NodeJS, OpenCV and Streaming Images Using Net Socket

My end goal is to stream video from my laptop to a server. I'm trying to accomplish this by using NodeJS on both the laptop and the server. I use the OpenCV library to capture the video on the laptop and save it to a jpg file. I then read the file and convert it to base64 so that I can transport it using the net module's sockets in Node. This is a continuous process: capture, encode, and send.
Here is the server code for just transmitting one jpg file:
var cv = require('opencv');
var fs = require('fs');
var net = require('net');

var camera = new cv.VideoCapture(0);
var server = net.createServer();
server.listen('50007', '127.0.0.1');

server.on('connection', function(socket){
  camera.read(function(image){
    image.save('original.jpg');
    fs.readFile('original.jpg', 'base64', function(err, image){
      socket.write(image, 'base64', function(){
        socket.end();
      });
    });
  });
});
On the client I loop until the FIN is received from the server. Here is the client code:
var net = require('net');
var fs = require('fs');

var client = new net.Socket();
var buffer = '';

client.setEncoding('base64');
client.connect('50007', '127.0.0.1', function(){
  console.log('Connecting to server...');
});

client.on('data', function(data){
  buffer += data;
});

client.on('end', function(){
  var dataBuffer = new Buffer(buffer, 'base64');
  fs.writeFile('copy.jpg', dataBuffer, function(err){
    if(err){
      console.log(err);
    }
  });
});
The problem is that the entire image does not actually get sent. When I open the received file, copy.jpg, there is always a chunk missing at the bottom.
In the final version the goal is to send one jpg after another and delimit the end of each jpg with a keyword such as 'EndOfFile'. I tried to do this by appending the keyword 'EndOfFile' to my base64-encoded image before sending, but on the receiving end that really got screwed up.
Sample Advanced Server:
fs.readFile('original.jpg', 'base64', function(err, image){
  image += 'EndOfFile';
  socket.write(image, 'base64');
});
On the client side the loop would examine each chunk of data for the keyword; if it found it, whatever was in the buffer would be written to file and the buffer reset, ready for the next file.
Sample Advanced Client:
client.on('data', function(data){
  if(data.indexOf('EndOfFile') > 0){
    buffer += data.substr(0, data.indexOf('EndOfFile'));
    var dataBuffer = new Buffer(buffer, 'base64');
    fs.writeFile('copy.jpg', dataBuffer, function(err){
      if(err){
        console.log(err);
      }
    });
    buffer = '';
  } else {
    buffer += data;
  }
});
I've gotten this to work in Python, so I think my logic is correct, but I'm not as comfortable in NodeJS.
Could someone tell me whether this is a sane way to do it, and where I may have gone wrong?
Thanks in advance!
I suspect you're seeing the end event while the last bit of data is still buffered.
Try waiting for the close event rather than the end event. I'm not sure about sockets, but in other Node APIs, such as spawn, the end event fires early, before related streams are flushed, so there may still be buffered data waiting.
You could also avoid managing this yourself by piping: use fs.createWriteStream() and .pipe() the socket stream into the file.
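A sketch of that piping approach (this drops the base64 round-trip, which is an assumption beyond the answer above; sending raw bytes avoids the re-encoding entirely):
// Server side, inside the 'connection' handler: stream the saved jpg
// straight into the socket. pipe() ends the socket when the file ends.
fs.createReadStream('original.jpg').pipe(socket);

// Client side: let pipe() handle buffering and flushing; the file
// stream is ended automatically when the socket finishes.
var net = require('net');
var fs = require('fs');

var client = new net.Socket();
client.connect(50007, '127.0.0.1');
client.pipe(fs.createWriteStream('copy.jpg'));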
