I am reading data from a hardware device. I have written the code in Node.js and I am reading the response over a TCP socket. The response looks like this:
The code I have tried is this:
socket.on('data', function(data) {
    console.log(data.toString());
});
I have also tried this:
var iconv = require('iconv-lite');
var encoding = 'iso-8859-1';

socket.on('data', function(data) {
    var body1 = iconv.decode(data, encoding);
    console.log(body1);
});
Your encoding should probably be utf8 rather than iso-8859-1; try this first:
var encoding = 'utf8';
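For example, a minimal sketch reusing the iconv-lite snippet from the question, with only the encoding switched (the variable names are taken from that snippet):
var iconv = require('iconv-lite');
var encoding = 'utf8';

socket.on('data', function(data) {
    // data is a raw Buffer; decode it as UTF-8 instead of ISO-8859-1
    var body1 = iconv.decode(data, encoding);
    console.log(body1);
});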
I would like to receive binary data (like .pdf or .doc) from a TCP socket; here is the code:
To send the file:
fs.readFile(path, function (err, data) {
    var client = new net.Socket();
    client.connect(user_port, user_ip, function () {
        client.write(data, 'binary');
        client.destroy();
    });
});
To receive the file:
net.createServer(function(socket){
    socket.setEncoding('binary');
    socket.on('data', function (data) {
        var file_data = new Buffer(data, 'binary');
        fs.appendFile(utils.getUserDir() + '/my_file.doc', file_data);
    });
    socket.on('error', function(err){
        console.log(err.message);
    });
}).listen(utils.getPort(), utils.getExternalIp());
As the files are too big to fit in a single packet, they are sent as multiple packets; in fact there are multiple 'data' events for the same file.
I thought it would be possible to append each Buffer of data to the file, but when I open the .doc it is corrupted or has binary junk in it.
PS: I can't use Buffer.concat() and save the file afterwards, since I don't know which packet is the last one...
Thank you
For sending files like this, it's better to stream them instead of buffering everything into memory and then sending it. (Also, you don't need the 'binary' encoding argument, since fs.readFile() gives you a Buffer by default.)
For example:
var client = new net.Socket();
client.connect(user_port, user_ip, function() {
    fs.createReadStream(path).pipe(client);
});

// ...

net.createServer(function(socket){
    socket.pipe(fs.createWriteStream(utils.getUserDir() + '/my_file.doc'));
    socket.on('error', function(err){
        console.log(err.message);
    });
}).listen(utils.getPort(), utils.getExternalIp());
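If you also want to know when a file has arrived completely (the concern about not knowing which packet is the last one), one possible sketch, assuming the sender closes the connection after each file, is to listen for the write stream's 'finish' event:
net.createServer(function(socket){
    var out = fs.createWriteStream(utils.getUserDir() + '/my_file.doc');
    // 'finish' fires after the sender ends the connection and all piped data is flushed
    out.on('finish', function(){
        console.log('File received completely');
    });
    socket.pipe(out);
    socket.on('error', function(err){
        console.log(err.message);
    });
}).listen(utils.getPort(), utils.getExternalIp());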
I'm trying to POST a raw body with restify. I have the receive side correct: when using Postman I can send a raw zip file, and the file is correctly created on the server's file system. However, I'm struggling to write my test in Mocha. Here is the code I have; any help would be greatly appreciated.
I've tried this approach.
const should = require('should');
const restify = require('restify');
const fs = require('fs');

const port = 8080;
const url = 'http://localhost:' + port;
const client = restify.createJsonClient({
    url: url,
    version: '~1.0'
});

const testPath = 'test/assets/test.zip';
fs.existsSync(testPath).should.equal(true);
const readStream = fs.createReadStream(testPath);

client.post('/v1/deploy', readStream, function(err, req, res, data) {
    if (err) {
        throw new Error(err);
    }
    should(res).not.null();
    should(res.statusCode).not.null();
    should(res.statusCode).not.undefined();
    res.statusCode.should.equal(200);
    should(data).not.null();
    should(data.endpoint).not.undefined();
    data.endpoint.should.equal('http://endpointyouhit:8080');
    done();
});
Yet the file size on the file system is always 0. I'm not using my readStream correctly, but I'm not sure how to correct it. Any help would be greatly appreciated.
Note that I want to stream the file, not load it into memory on transmit or receive; the file can potentially be too large for an in-memory operation.
Thanks,
Todd
One thing is that you would need to specify a content type of multipart/form-data. However, it looks like restify doesn't support that content type, so you're probably out of luck using the restify client to post a file.
To answer my own question, it doesn't appear to be possible to do this with the restify client. I also tried the request module, which claims to have this capability; however, when using its streaming examples, I always had a file size of 0 on the server. Below is a functional Mocha integration test.
const http = require('http');
const fs = require('fs');
const should = require('should');

const port = 8080;
const testPath = 'test/assets/test.zip';
fs.existsSync(testPath).should.equal(true);
const readStream = fs.createReadStream(testPath);

var options = {
    host: 'localhost',
    port: port,
    path: '/v1/deploy/testvalue',
    method: 'PUT'
};

var req = http.request(options, function (res) {
    // This feels a bit backwards, but these assertions are evaluated AFTER the read stream has closed.
    var buffer = '';
    // Pipe the response body into a buffer.
    res.on('data', function (data) {
        buffer += data;
    });
    res.on('end', function () {
        should(res).not.null();
        should(res.statusCode).not.null();
        should(res.statusCode).not.undefined();
        res.statusCode.should.equal(200);
        const json = JSON.parse(buffer);
        should(json).not.null();
        should(json.endpoint).not.undefined();
        json.endpoint.should.equal('http://endpointyouhit:8080');
        done();
    });
});

req.on('error', function (err) {
    if (err) {
        throw new Error(err);
    }
});

// Pipe the read stream into the request.
readStream.pipe(req);

// Close the request when the read stream closes.
readStream.on('close', function () {
    req.end();
    console.log('I finished.');
});

// Note: if we end up with larger files, we may want to support 'continue', much as S3 does:
// https://nodejs.org/api/http.html#http_event_continue
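For reference, a hedged sketch of what that 'continue' handling could look like with the plain http client (this is not part of the original test; it just reuses the options and testPath from above): send an Expect: 100-continue header and only start piping once the server signals it is ready.
// Sketch only: opt in to 100-continue so large uploads wait for the server's go-ahead.
var options = {
    host: 'localhost',
    port: port,
    path: '/v1/deploy/testvalue',
    method: 'PUT',
    headers: { 'Expect': '100-continue' }
};

var req = http.request(options);
req.on('continue', function () {
    // The server replied "100 Continue"; now stream the body.
    fs.createReadStream(testPath).pipe(req);
});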
What I am trying to achieve with node.js/io.js is to send a file from one server to another via a proxy. To avoid buffering in memory I want to use streams.
The proxy should be able to connect to multiple targets dynamically. The target connection information should be sent to the proxy prior to the file data.
With normal socket communication and buffering it is not a problem, but how, in general, can this be done with streams?
var net = require('net');
var fs = require('fs');
//create readstream from file
var myFile = fs.createReadStream('E:/sample.tar.gz');
// Proxy server
//####################################################################################################
var proxy = net.createServer(function (socket) {
    // Create a new connection to the TCP server
    var client = net.connect('9010');
    // 2-way pipe between client and TCP server
    socket.pipe(client).pipe(socket);
}).listen(9000);

// Targetserver
//####################################################################################################
var server = net.createServer(function (socket) {
    // create filestream to write data into file
    var destfile = fs.createWriteStream('E:/sample_copy.tar.gz');
    socket.on('data', function (buffer) {
        console.log('Get data on targetserver...');
        // write buffer to file
        destfile.write(buffer);
    });
    socket.on('end', function () {
        // release file from writestream
        destfile.end();
    });
}).listen(9010);
// Client
//####################################################################################################
// Send file to proxy
var client = new net.Socket();

// connect to proxy
client.connect('9000', '127.0.0.1', function () {
    console.log('Connection to proxy opened');
});

// send data to proxy
myFile.pipe(client);

// read response from target
client.on('data', function(data) {
    console.log('Response: ' + data);
    // close the client socket completely
    client.destroy();
});

// Add a 'close' event handler for the client socket
client.on('close', function() {
    console.log('Connection to proxy closed');
});
Any pointer to a good tutorial would also be welcome.
TMOE
socket.write() already uses streams under the hood so you don't need to do anything special. Just send it the usual Buffer object or string and it will use a stream.
From the current source code of io.js, here's what happens when you use socket.write():
Socket.prototype.write = function(chunk, encoding, cb) {
    if (typeof chunk !== 'string' && !(chunk instanceof Buffer))
        throw new TypeError('invalid data');
    return stream.Duplex.prototype.write.apply(this, arguments);
};
And stream is declared like this:
const stream = require('stream');
Apologies if I've misunderstood your question/requirements! By all means clarify if I have, and I'll try again (or delete this answer so it's not a distraction).
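As for sending the target connection information ahead of the file data, here is one possible sketch. The newline-delimited JSON header is an assumed protocol, not something from the original code; it just reuses the ports and file path from the question.
var net = require('net');
var fs = require('fs');

// Client side: write one JSON header line first, then pipe the file after it.
var client = net.connect(9000, '127.0.0.1', function () {
    client.write(JSON.stringify({ host: '127.0.0.1', port: 9010 }) + '\n');
    fs.createReadStream('E:/sample.tar.gz').pipe(client);
});

// Proxy side: treat everything up to the first newline as the header,
// then pipe the remaining bytes to the requested target.
var proxy = net.createServer(function (socket) {
    socket.once('data', function (first) {
        socket.pause(); // buffer further chunks until the target connection is ready
        // Simplification: assumes the whole header line arrives in the first chunk.
        var idx = first.indexOf('\n');
        var info = JSON.parse(first.slice(0, idx).toString());
        var target = net.connect(info.port, info.host, function () {
            target.write(first.slice(idx + 1)); // forward any file bytes already received
            socket.pipe(target).pipe(socket);   // stream the rest in both directions
        });
    });
}).listen(9000);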
Take a look:
var Client = require('ftp');
var fs = require('fs');

var c = new Client();
c.on('ready', function() {
    c.get('foo.txt', function(err, stream) {
        if (err) throw err;
        stream.once('close', function() { c.end(); });
        stream.pipe(fs.createWriteStream('foo.local-copy.txt'));
    });
});

// connect to localhost:21 as anonymous
c.connect();
This piece of code is from https://www.npmjs.org/package/ftp. Basically it opens a read stream and pipes it into a write stream. At the end it closes the connection to the source.
Does the pipe method close the target stream after the piped (source) stream is closed? I couldn't find this in the API documentation.
I made some tests from which I can conclude that it does, but I am not sure.
The destination stream is closed when the source emits an end event. This is documented in Stream.pipe:
By default end() is called on the destination when the source stream emits end, so that destination is no longer writable.
This allows calls of the form:
var http = require('http'),
    fs = require('fs');

http.createServer(function (req, res) {
    fs.createReadStream('path/to/file').pipe(res);
}).listen(3000);
If end wasn't called on the response object, the request would time out.
This would make the request time out:
fs.createReadStream('path/to/file').pipe(res, {end: false});
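Conversely, a small sketch of when {end: false} is useful (the trailer text below is purely illustrative): keep the response open after the pipe finishes so you can append something and end it yourself.
var stream = fs.createReadStream('path/to/file');
stream.pipe(res, { end: false });            // don't end the response automatically
stream.on('end', function () {
    res.end('\n-- transfer complete --\n');  // finish the response manually
});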
My end goal is to stream video from my laptop to a server. I'm trying to accomplish this by using Node.js on both the laptop and the server. I use the OpenCV library to capture the video on the laptop and save it to a jpg file. I then read the file and convert it to base64 so that I can transport it using the net.Socket module in Node. This is a continuous process: capture, encode, and send.
Here is the server code for just transmitting one jpg file:
var cv = require('opencv');
var fs = require('fs');
var net = require('net');

var camera = new cv.VideoCapture(0);
var server = net.createServer();
server.listen('50007', '127.0.0.1');

server.on('connection', function(socket){
    camera.read(function(image){
        image.save('original.jpg');
        fs.readFile('original.jpg', 'base64', function(err, image){
            socket.write(image, 'base64', function(){
                socket.end();
            });
        });
    });
});
On the client I loop until the FIN is received from the server. Here is the client code:
var net = require('net');
var fs = require('fs');

var client = new net.Socket();
var buffer = '';
client.setEncoding('base64');

client.connect('50007', '127.0.0.1', function(){
    console.log('Connecting to server...');
});

client.on('data', function(data){
    buffer += data;
});

client.on('end', function(){
    var dataBuffer = new Buffer(buffer, 'base64');
    fs.writeFile('copy.jpg', dataBuffer, function(err){
        if(err){
            console.log(err);
        }
    });
});
The problem is that the entire image does not actually get sent. When I open the received file, copy.jpg, there is always a chunk missing at the bottom.
In the final version the goal is to send one jpg after another and delimit the end of each jpg with a keyword such as 'EndOfFile'. I tried to do this by appending the keyword 'EndOfFile' to my base64-encoded image before sending, but on the receiving end that really got screwed up.
Sample Advanced Server:
fs.readFile('original.jpg', 'base64', function(err, image){
    image += 'EndOfFile';
    socket.write(image, 'base64');
});
On the client side the loop would examine each chunk of data for the keyword; if it found it, whatever is in the buffer would be written to file and the buffer reset, ready for the next file.
Sample Advanced Client
client.on('data', function(data){
    if(data.indexOf('EndOfFile') > 0){
        buffer += data.substr(0, data.indexOf('EndOfLine'));
        var dataBuffer = new Buffer(buffer, 'base64');
        fs.writeFile('copy.jpg', dataBuffer, function(err){
            if(err){
                console.log(err);
            }
        });
        buffer = '';
    } else {
        buffer += data;
    }
});
I've gotten this to work in Python, so I think my logic is correct, but I'm not as comfortable in NodeJS.
I'd appreciate it if someone could tell me whether this is a sane way to do this and where I may have gone wrong.
Thanks in advance!
I suspect you're seeing the end event while the last bit of data is still buffered.
Try waiting for the close event rather than the end event. I'm not sure about sockets, but in other Node APIs like spawn, the end event is fired early, before related streams are flushed, so there may still be buffered data waiting.
You could avoid managing this yourself by piping. Use fs.createWriteStream() and .pipe() the socket stream to the file.
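A minimal sketch of that piping approach for the single-image client (this assumes the server sends raw JPEG bytes, which is what socket.write(image, 'base64') in the server code above puts on the wire):
var net = require('net');
var fs = require('fs');

var client = net.connect('50007', '127.0.0.1', function(){
    console.log('Connecting to server...');
});

// No manual buffering or setEncoding: let the raw bytes flow straight into the file.
// The write stream is ended automatically when the server closes the connection.
client.pipe(fs.createWriteStream('copy.jpg'));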