Node pipe stops working - node.js

My client sends an image file to the server. It works 5 times and then suddenly stops. I am pretty new to streams and pipe(), so I am not sure what I am doing wrong.
Server Code
var http = require('http');
var fs = require('fs');

var i = 0;
http.createServer(function (req, res) {
    console.log("File received");
    // This opens up the writable stream to `output`
    var name = "./test" + i + ".jpg";
    var writeStream = fs.createWriteStream(name);
    // This pipes the POST data to the file
    req.pipe(writeStream);
    req.on('end', function () {
        console.log("File saved");
        i++;
    });
    // This is here in case any errors occur
    writeStream.on('error', function (err) {
        console.log(err);
    });
}).listen(3000);
Client code
var request = require('request');
var fs = require('fs');

setInterval(function () {
    var readStream = fs.createReadStream('./test.jpg');
    readStream.on('open', function () {
        // This pipes the read stream into the body of the POST request
        readStream.pipe(request.post('http://192.168.1.100:3000/test'));
        console.log("Sending file to server");
    });
}, 1000);

This behaves like a resource exhaustion issue. It's not clear which calls throw errors and which just return: does the server accept the connection on the 6th call? Does the write stream open? Does the pipe open? (Node's default HTTP agent historically capped connections at 5 sockets per host, which would match a stall on the 6th request if the server never sends a response.)
Try ending the connection and closing the pipe after the image is saved. Maybe close the write stream too; I don't remember whether Node garbage-collects file descriptors.

I had to do the following on the server side, inside the request handler once the file is saved, to make this work:
res.statusCode = 200;
res.end();
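For context, a minimal sketch of where those two lines belong in the server from the question (everything else unchanged); responding lets the client's HTTP agent release the socket instead of holding it open:

var http = require('http');
var fs = require('fs');

var i = 0;
http.createServer(function (req, res) {
    var writeStream = fs.createWriteStream('./test' + i + '.jpg');
    req.pipe(writeStream);
    req.on('end', function () {
        i++;
        // Respond so the client's agent can reuse or release this socket.
        res.statusCode = 200;
        res.end();
    });
    writeStream.on('error', function (err) {
        console.log(err);
    });
}).listen(3000);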

Related

When should I use 'open' event in Node.js?

I'm learning Node.js, and while working through the events and streams topics I encountered this example:
const http = require('http');
const fs = require('fs');

http
  .createServer((req, res) => {
    const fileStream = fs.createReadStream('./some-data.txt', 'utf8');
    fileStream.on('open', () => {
      fileStream.pipe(res);
    });
    fileStream.on('error', (err) => {
      res.end(err);
    });
  })
  .listen(5000);
While I understand the logic behind this code (the server transfers some big file in chunks with the help of streams), I don't understand the benefit of the 'open' event.
The same code, where I pipe the read stream into the write stream (the res object) right after creating fileStream, without listening for 'open', produces exactly the same result. What is the difference, and in which cases should I listen for 'open'?
const http = require('http');
const fs = require('fs');

http
  .createServer((req, res) => {
    const fileStream = fs.createReadStream('./some-data.txt', 'utf8');
    fileStream.pipe(res);
    fileStream.on('error', (err) => {
      res.end(err);
    });
  })
  .listen(5000);
In this particular case, it does not really make any difference whether you use the open event or not. If the file does not exist, the error event will be emitted either way, and you should report it via res.end(err.message) (note that res.end(err) fails, because err is an object rather than a string or Buffer).
The open event simply tells you that the stream's underlying file has been opened, so you can listen for it whenever some part of your code needs to run at exactly that moment.
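One case where listening for 'open' genuinely helps (a hedged sketch, not from the original thread): committing to the response headers only once the file is known to exist, so a missing file can still get a clean 404:

const http = require('http');
const fs = require('fs');

http
  .createServer((req, res) => {
    const fileStream = fs.createReadStream('./some-data.txt', 'utf8');
    fileStream.on('open', () => {
      // The file definitely exists now, so it is safe to commit to a 200.
      res.writeHead(200, { 'Content-Type': 'text/plain' });
      fileStream.pipe(res);
    });
    fileStream.on('error', (err) => {
      // 'error' fires before 'open' for a missing file, so no headers
      // have been sent yet and we can still reply with a 404.
      res.writeHead(404);
      res.end(err.message);
    });
  })
  .listen(5000);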

Receive large binary data from socket in Nodejs

I would like to receive binary data (like .pdf or .doc) over a TCP socket. Here is the code.
To send the file:
var net = require('net');
var fs = require('fs');

fs.readFile(path, function (err, data) {
    var client = new net.Socket();
    client.connect(user_port, user_ip, function () {
        client.write(data, 'binary');
        client.destroy();
    });
});
To receive the file :
net.createServer(function (socket) {
    socket.setEncoding('binary');
    socket.on('data', function (data) {
        var file_data = new Buffer(data, 'binary');
        fs.appendFile(utils.getUserDir() + '/my_file.doc', file_data);
    });
    socket.on('error', function (err) {
        console.log(err.message);
    });
}).listen(utils.getPort(), utils.getExternalIp());
As the files are too big to arrive in one chunk, they are delivered in multiple packets; in fact, there are multiple 'data' events for the same file.
I thought it was possible to append each chunk's Buffer to a file, but when I open the .doc it is corrupted or has binary junk in it.
PS: I can't use Buffer.concat() and save the file afterwards, since I don't know which packet is the last one...
Thank you
For sending files like this, it's better to stream them rather than buffer the whole file into memory and then send it. (Also, you don't need the 'binary' encoding argument, since fs.readFile() gives you a Buffer by default.)
For example:
// Sending side
var client = new net.Socket();
client.connect(user_port, user_ip, function () {
    fs.createReadStream(path).pipe(client);
});

// Receiving side
net.createServer(function (socket) {
    socket.pipe(fs.createWriteStream(utils.getUserDir() + '/my_file.doc'));
    socket.on('error', function (err) {
        console.log(err.message);
    });
}).listen(utils.getPort(), utils.getExternalIp());
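One follow-up on the receiving side: pipe() calls end() on the write stream when the socket emits end, so the file is closed cleanly once the sender destroys its side of the connection. This does assume one file per connection; to send several files over a single socket you would still need to frame them yourself, for example with a length prefix.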

POSTing RAW body with restify client

I'm trying to POST a raw body with restify. I have the receive side correct: when using POSTman I can send a raw zip file, and the file is correctly created on the server's file system. However, I'm struggling to write my test in mocha. Here is the code I have; any help would be greatly appreciated.
I've tried this approach.
const should = require('should');
const restify = require('restify');
const fs = require('fs');

const port = 8080;
const url = 'http://localhost:' + port;
const client = restify.createJsonClient({
    url: url,
    version: '~1.0'
});

const testPath = 'test/assets/test.zip';
fs.existsSync(testPath).should.equal(true);
const readStream = fs.createReadStream(testPath);

client.post('/v1/deploy', readStream, function (err, req, res, data) {
    if (err) {
        throw new Error(err);
    }
    should(res).not.null();
    should(res.statusCode).not.null();
    should(res.statusCode).not.undefined();
    res.statusCode.should.equal(200);
    should(data).not.null();
    should(data.endpoint).not.undefined();
    data.endpoint.should.equal('http://endpointyouhit:8080');
    done();
});
Yet the file size on the file system is always 0. I'm not using my readStream correctly, but I'm not sure how to fix it. Any help would be greatly appreciated.
Note that I want to stream the file, not load it into memory on transmit and receive; the file can potentially be too large for an in-memory operation.
Thanks,
Todd
One thing is that you would need to specify a content type of multipart/form-data. However, it looks like restify doesn't support that content type, so you're probably out of luck using the restify client to post a file.
To answer my own question: it doesn't appear to be possible with the restify client. I also tried the request module, which claims to have this capability; however, when using its streaming examples, I always ended up with a file size of 0 on the server. Below is a functional mocha integration test.
const http = require('http');
const fs = require('fs');
const should = require('should');

const port = 8080;
const testPath = 'test/assets/test.zip';
fs.existsSync(testPath).should.equal(true);
const readStream = fs.createReadStream(testPath);

var options = {
    host: 'localhost',
    port: port,
    path: '/v1/deploy/testvalue',
    method: 'PUT'
};

var req = http.request(options, function (res) {
    // This feels a bit backwards, but these are evaluated AFTER the read stream has closed.
    var buffer = '';
    // Collect the response body into a buffer
    res.on('data', function (data) {
        buffer += data;
    });
    res.on('end', function () {
        should(res).not.null();
        should(res.statusCode).not.null();
        should(res.statusCode).not.undefined();
        res.statusCode.should.equal(200);
        const json = JSON.parse(buffer);
        should(json).not.null();
        should(json.endpoint).not.undefined();
        json.endpoint.should.equal('http://endpointyouhit:8080');
        done();
    });
});

req.on('error', function (err) {
    if (err) {
        throw new Error(err);
    }
});

// Pipe the read stream into the request
readStream.pipe(req);

// Close the request when the read stream closes
readStream.on('close', function () {
    req.end();
    console.log('I finished.');
});

// Note that if we end up with larger files, we may want to support 100-continue, much as S3 does:
// https://nodejs.org/api/http.html#http_event_continue
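For completeness, a sketch (with the assertions trimmed) of how the snippet above sits inside a mocha it() block, since the bare snippet references done() without showing the wrapper:

const http = require('http');
const fs = require('fs');
const should = require('should');

const port = 8080;

describe('deploy endpoint', function () {
    it('accepts a streamed zip file', function (done) {
        const readStream = fs.createReadStream('test/assets/test.zip');
        const req = http.request({
            host: 'localhost',
            port: port,
            path: '/v1/deploy/testvalue',
            method: 'PUT'
        }, function (res) {
            res.resume(); // drain the body; assertions go in 'end' as above
            res.on('end', function () {
                res.statusCode.should.equal(200);
                done();
            });
        });
        req.on('error', done);
        readStream.pipe(req); // pipe() calls req.end() when the stream ends
    });
});

Note that because pipe() ends the destination by default, the explicit readStream.on('close') handler in the test above is belt-and-braces rather than strictly required.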

Proper way to pipe buffers in Node JS

Take a look:
var Client = require('ftp');
var fs = require('fs');

var c = new Client();
c.on('ready', function () {
    c.get('foo.txt', function (err, stream) {
        if (err) throw err;
        stream.once('close', function () { c.end(); });
        stream.pipe(fs.createWriteStream('foo.local-copy.txt'));
    });
});
// connect to localhost:21 as anonymous
c.connect();
This piece of code is from https://www.npmjs.org/package/ftp. Basically it opens a read stream and pipes it into a write stream; at the end it closes the connection to the source.
Does the pipe method close the target (destination) stream after the piped (source) stream is closed? I couldn't find this in the API documentation.
I ran some tests from which I would conclude that it does, but I am not sure.
The destination stream is closed when the source emits an end event. This is documented in Stream.pipe:
By default end() is called on the destination when the source stream
emits end, so that destination is no longer writable.
This allows calls of the form:
var http = require('http'),
    fs = require('fs');

http.createServer(function (req, res) {
    fs.createReadStream('path/to/file').pipe(res);
}).listen(3000);
If end wasn't called on the response object, the request would time out.
This would make the request time out:
fs.createReadStream('path/to/file').pipe(res, {end: false});
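The {end: false} option exists for the opposite situation, when you want to keep the destination open after one source finishes. A minimal sketch (file names made up) that streams two files into a single response and only then ends it:

var http = require('http'),
    fs = require('fs');

http.createServer(function (req, res) {
    var first = fs.createReadStream('part1.txt');
    first.pipe(res, { end: false }); // keep res writable after part1 ends
    first.on('end', function () {
        // The second pipe uses the default, so it ends the response for us.
        fs.createReadStream('part2.txt').pipe(res);
    });
}).listen(3000);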

NodeJS, OpenCV and Streaming Images Using Net Socket

My end goal is to stream video from my laptop to a server. I'm trying to accomplish this by using Node.js on both the laptop and the server. I use the OpenCV library to capture video on the laptop and save it to a jpg file. I then read the file and convert it to base64 so that I can transport it using Node's net.Socket module. This is a continuous process: capture, encode, and send.
Here is the server code for just transmitting one jpg file:
var cv = require('opencv');
var fs = require('fs');
var net = require('net');

var camera = new cv.VideoCapture(0);
var server = net.createServer();
server.listen('50007', '127.0.0.1');

server.on('connection', function (socket) {
    camera.read(function (image) {
        image.save('original.jpg');
        fs.readFile('original.jpg', 'base64', function (err, image) {
            socket.write(image, 'base64', function () {
                socket.end();
            });
        });
    });
});
On the client I loop until the FIN is received from the server. Here is the client code:
var net = require('net');
var fs = require('fs');

var client = new net.Socket();
var buffer = '';
client.setEncoding('base64');

client.connect('50007', '127.0.0.1', function () {
    console.log('Connecting to server...');
});

client.on('data', function (data) {
    buffer += data;
});

client.on('end', function () {
    var dataBuffer = new Buffer(buffer, 'base64');
    fs.writeFile('copy.jpg', dataBuffer, function (err) {
        if (err) {
            console.log(err);
        }
    });
});
The problem is that the entire image does not actually get through. When I open the received file, copy.jpg, there is always a chunk missing at the bottom.
In the final version the goal is to send one jpg after another and mark the end of each jpg with a keyword such as 'EndOfFile'. I tried appending the keyword 'EndOfFile' to my base64-encoded image before sending, but on the receiving end that really got screwed up.
Sample Advanced Server:
fs.readFile('original.jpg', 'base64', function (err, image) {
    image += 'EndOfFile';
    socket.write(image, 'base64');
});
On the client side, the loop would examine each chunk of data for the keyword; if it finds it, whatever is in the buffer is written to file and the buffer is reset, ready for the next file.
Sample Advanced Client
client.on('data', function (data) {
    if (data.indexOf('EndOfFile') > 0) {
        buffer += data.substr(0, data.indexOf('EndOfFile'));
        var dataBuffer = new Buffer(buffer, 'base64');
        fs.writeFile('copy.jpg', dataBuffer, function (err) {
            if (err) {
                console.log(err);
            }
        });
        buffer = '';
    } else {
        buffer += data;
    }
});
I've gotten this to work in Python, so I think my logic is correct, but I'm not as comfortable in Node.js.
Could someone tell me whether this is a sane approach and where I may have gone wrong?
Thanks in advance!
I suspect you're seeing the end event while the last bit of data is still buffered.
Try waiting for the close event rather than the end event. I'm not sure about sockets, but in other Node APIs such as spawn, the end event fires early, before related streams are flushed, so there may still be buffered data waiting.
You could avoid managing this yourself by piping: use fs.createWriteStream() and .pipe() the socket stream into it.
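A sketch of that suggestion for the single-image-per-connection case (port and file name taken from the question):

var net = require('net');
var fs = require('fs');

net.createServer(function (socket) {
    // No setEncoding() here: keep the data as raw Buffers.
    socket.pipe(fs.createWriteStream('copy.jpg'));
    socket.on('error', function (err) {
        console.log(err.message);
    });
}).listen(50007, '127.0.0.1');

For the continuous multi-image version you would still need some framing; a length prefix (send each image's byte length before its data) tends to be more robust than scanning for a text delimiter inside base64 data.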
