Send file over TCP socket via proxy with streams - Node.js

What I am trying to achieve with Node.js/io.js is to send a file from one server to another via a proxy. To avoid buffering the whole file in memory, I want to use streams.
The proxy should be able to connect to multiple targets dynamically; the target connection information should be sent to the proxy prior to the file data.
With normal socket communication and buffering this is not a problem, but how can this be done with streams?
var net = require('net');
var fs = require('fs');

// Create a read stream from the source file
var myFile = fs.createReadStream('E:/sample.tar.gz');

// Proxy server
//####################################################################################################
var proxy = net.createServer(function (socket) {
    // Create a new connection to the target TCP server
    var client = net.connect(9010);
    // 2-way pipe between the client and the target TCP server
    socket.pipe(client).pipe(socket);
}).listen(9000);
// Target server
//####################################################################################################
var server = net.createServer(function (socket) {
    // Create a file stream to write the incoming data into a file
    var destfile = fs.createWriteStream('E:/sample_copy.tar.gz');
    socket.on('data', function (buffer) {
        console.log('Got data on target server...');
        // Write the buffer to the file
        destfile.write(buffer);
    });
    socket.on('end', function () {
        // Release the file from the write stream
        destfile.end();
    });
}).listen(9010);
// Client
//####################################################################################################
// Send the file to the proxy
var client = new net.Socket();

// Connect to the proxy
client.connect(9000, '127.0.0.1', function () {
    console.log('Connection to proxy opened');
});

// Send data to the proxy
myFile.pipe(client);

// Read the response from the target
client.on('data', function (data) {
    console.log('Response: ' + data);
    // Close the client socket completely
    client.destroy();
});

// Add a 'close' event handler for the client socket
client.on('close', function () {
    console.log('Connection to proxy closed');
});
Any hint to a good tutorial is also welcome.

socket.write() already uses streams under the hood so you don't need to do anything special. Just send it the usual Buffer object or string and it will use a stream.
From the current source code of io.js, here's what happens when you use socket.write():
Socket.prototype.write = function(chunk, encoding, cb) {
    if (typeof chunk !== 'string' && !(chunk instanceof Buffer))
        throw new TypeError('invalid data');
    return stream.Duplex.prototype.write.apply(this, arguments);
};
And stream is declared like this:
const stream = require('stream');
Apologies if I've misunderstood your question/requirements! By all means clarify, and I'll try again (or delete this answer so it's not a distraction).
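To address the part of the question the answer above leaves open, sending the target connection information ahead of the file data, one common approach is to prefix the stream with a small header that the proxy parses before it starts piping. Below is a minimal sketch of that idea; the newline-delimited JSON header and its host/port fields are assumptions for illustration, not part of the original code:
var net = require('net');
var fs = require('fs');

// Proxy: read a single JSON header line, then pipe everything else to the target
var proxy = net.createServer(function (socket) {
    var buffered = Buffer.alloc(0);
    socket.on('readable', function onReadable() {
        var chunk = socket.read();
        if (chunk === null) return;
        buffered = Buffer.concat([buffered, chunk]);
        var newline = buffered.indexOf('\n');
        if (newline === -1) return; // header not complete yet
        // Header looks like {"host":"127.0.0.1","port":9010} (error handling omitted)
        var header = JSON.parse(buffered.slice(0, newline).toString());
        socket.removeListener('readable', onReadable);
        var target = net.connect(header.port, header.host, function () {
            // Forward any file data that arrived together with the header
            target.write(buffered.slice(newline + 1));
            // From here on everything is streamed without buffering
            socket.pipe(target).pipe(socket);
        });
    });
}).listen(9000);

// Client: send the header line first, then pipe the file
var client = net.connect(9000, '127.0.0.1', function () {
    client.write(JSON.stringify({ host: '127.0.0.1', port: 9010 }) + '\n');
    fs.createReadStream('E:/sample.tar.gz').pipe(client);
});
This keeps the proxy dynamic: each incoming connection names its own target, and only the short header is ever buffered in memory.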

Related

How to connect multiple socket clients in Node.js

The scenario is: I have a server where socket(1) runs, and another server where a socket(2) client connects to socket(1).
I also have a browser socket which connects to socket(1).
The idea is to make a request from the browser and bring back data from the socket(2) server.
I am not sure how to differentiate between the socket clients, as they all look the same to socket(1).
Ideally there will be multiple browser sockets and multiple socket(2) clients, and the browser sockets can make requests to any of the socket(2) clients.
How can I implement this using Node.js and socket.io?
Server
socket.on('action', (action) => {
    if (action.type === 'server/hello') {
        io.sockets.emit('broadcast', { description: clients + ' clients connected!' });
        console.log('Got hello data!', action.data);
    }
});
Browser client
var socket = io.connect('localhost:3000', {reconnect: true});
socket.on('connect', function (data) {
    socket.emit('joined', 'Hello World from client this is client plxx');
});
socket.on('response2', function (data) {
    console.log("got it ", data);
    $('#messages').append($('<li>').text(JSON.stringify(data)));
});
Server client
var io = require('socket.io-client');
var socket = io.connect('http://localhost:3000', {reconnect: true});
socket.on('broadcast', function (t) {
    socket.emit("data", {data: 32});
    console.log('broadcast! my host is est');
});
I should be able to communicate between the socket clients.
What I understood from your question is that you need to differentiate between sockets from different clients.
To solve that, I would suggest simply emitting the socket's source from the client on connect, and splitting the sockets into two lists on the server.
Example:
Server
const BROWSER_CLIENTS = {};
const SERVER_CLIENTS = {};

io.on("connection", socket => {
    socket.on("source", payload => {
        if (payload == "browser")
            BROWSER_CLIENTS[socket.id] = socket;
        else if (payload == "server")
            SERVER_CLIENTS[socket.id] = socket;
    });
    socket.on("disconnect", () => {
        delete BROWSER_CLIENTS[socket.id];
        delete SERVER_CLIENTS[socket.id];
    });
});
Browser Client
socket.on("connect", () => {
socket.emit("source", "browser");
});
Server Client
socket.on("connect", () => {
socket.emit("source", "server");
});
Now when you receive an event you can tell which source it originated from. And if you need to send to all sockets of one type of client, you can simply do this:
Server
for (let i in BROWSER_CLIENTS)
    BROWSER_CLIENTS[i].emit("Hello Browsers");
for (let i in SERVER_CLIENTS)
    SERVER_CLIENTS[i].emit("Hello Servers");
EDIT: I found this link and thought you could make use of it: Socket.io Rooms
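If you go that route, the same grouping can be done with socket.io's built-in rooms instead of hand-maintained maps. A minimal sketch, reusing the hypothetical "source" event from above; rooms are left automatically when a socket disconnects:
// Server: put each socket into a room named after its source
io.on("connection", socket => {
    socket.on("source", payload => {
        // payload is "browser" or "server", as emitted by the clients above
        socket.join(payload);
    });
});

// Broadcast to one group only
io.to("browser").emit("broadcast", "Hello Browsers");
io.to("server").emit("broadcast", "Hello Servers");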

Meaning of xserversock and xclientsock in ssh2 library for x11 forwarding

I'm trying to understand how X11 port forwarding inside SSH connections works programmatically.
In particular, I'm trying to understand this part of the README from the code recipes of Node's ssh2 client library. Once the client is instantiated, what do the variables xserversock and xclientsock do? Why does xclientsock pipe data back to itself through xserversock? Finally, does xserversock actually connect to 127.0.0.1:6000, or to 10.13.23.10:6000?
var net = require('net');
var Client = require('ssh2').Client;

var conn = new Client();
conn.on('x11', function (info, accept, reject) {
    var xserversock = new net.Socket();
    xserversock.on('connect', function () {
        var xclientsock = accept();
        xclientsock.pipe(xserversock).pipe(xclientsock);
    });
    // connects to localhost:0.0
    xserversock.connect(6000, 'localhost');
});
conn.on('ready', function () {
    conn.exec('xeyes', { x11: true }, function (err, stream) {
        if (err) throw err;
        var code = 0;
        stream.on('end', function () {
            if (code !== 0)
                console.log('Do you have X11 forwarding enabled on your SSH server?');
            conn.end();
        }).on('exit', function (exitcode) {
            code = exitcode;
        });
    });
}).connect({
    host: '10.13.23.10',
    username: 'foo',
    password: 'bar'
});
xserversock is the connection to the X server running locally on your machine. xclientsock is the stream representing the incoming X client request (e.g. xeyes in the example) to your local X server.
xclientsock.pipe(xserversock).pipe(xclientsock); is not xclientsock piping to itself, it is merely allowing data to flow in both directions between the local X server and the remote X client. pipe() always returns the stream passed to it, to allow for easy chaining of streams. So this one line of code is equivalent to:
xclientsock.pipe(xserversock);
xserversock.pipe(xclientsock);
Finally, remember xserversock is a socket that is connecting to your local X server. This is evident with the .connect() arguments passed to the socket, so it will be connecting to 127.0.0.1:6000.
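A side note that may help when experimenting: an X server's TCP port is 6000 plus the display number, so forwarding to a non-default local display only means changing the connect call. The display number below is hypothetical:
// DISPLAY :1 listens on TCP port 6001 (6000 + display number)
var display = 1;
xserversock.connect(6000 + display, 'localhost');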

Node pipe stops working

My client sends an image file to the server. It works 5 times and then suddenly stops. I am pretty new to streams and pipe, so I am not sure what I am doing wrong.
Server Code
var http = require('http');
var fs = require('fs');

// Counter used to name the incoming files (its declaration was missing in the original snippet)
var i = 0;

http.createServer(function (req, res) {
    console.log("File received");
    // This opens up the writable stream to the output file
    var name = "./test" + i + ".jpg";
    var writeStream = fs.createWriteStream(name);
    // This pipes the POST data to the file
    req.pipe(writeStream);
    req.on('end', function () {
        console.log("File saved");
        i++;
    });
    // This is here in case any errors occur
    writeStream.on('error', function (err) {
        console.log(err);
    });
}).listen(3000);
Client code
var request = require('request');
var fs = require('fs');

setInterval(function () {
    var readStream = fs.createReadStream('./test.jpg');
    readStream.on('open', function () {
        // This pipes the read stream into the POST request (which goes to the server)
        readStream.pipe(request.post('http://192.168.1.100:3000/test'));
        console.log("Sent file to server");
    });
}, 1000);
This behaves like a resource exhaustion issue. The server never sends a response, so every upload leaves its socket open; Node's default http.Agent historically allowed only 5 concurrent sockets per host, which would match things stalling on the 6th call.
Try ending the response once the image is saved, and close the write stream explicitly rather than relying on Node to garbage collect the file descriptor.
I had to do the following on the server side to make this work :
res.statusCode = 200;
res.end();
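Putting that fix into the question's server handler, the whole thing would look something like this (same file-naming scheme as in the question):
var http = require('http');
var fs = require('fs');

var i = 0;
http.createServer(function (req, res) {
    var writeStream = fs.createWriteStream('./test' + i + '.jpg');
    req.pipe(writeStream);
    req.on('end', function () {
        console.log('File saved');
        i++;
        // Respond so the client's agent releases the socket;
        // without this, connections pile up and new uploads stall
        res.statusCode = 200;
        res.end();
    });
}).listen(3000);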

Proper way to pipe buffers in Node JS

Take a look:
var Client = require('ftp');
var fs = require('fs');

var c = new Client();
c.on('ready', function () {
    c.get('foo.txt', function (err, stream) {
        if (err) throw err;
        stream.once('close', function () { c.end(); });
        stream.pipe(fs.createWriteStream('foo.local-copy.txt'));
    });
});
// connect to localhost:21 as anonymous
c.connect();
This piece of code is from https://www.npmjs.org/package/ftp. Basically it opens a read stream and pipes it into a write stream; at the end it closes the connection from the source.
Does the pipe method close the target stream after the piped (source) stream is closed? I couldn't find it in the API documentation.
I made some tests from which I conclude that it does, but I am not sure.
The destination stream is closed when the source emits an end event. This is documented in Stream.pipe:
By default end() is called on the destination when the source stream emits end, so that destination is no longer writable.
This allows calls of the form:
var http = require('http'),
    fs = require('fs');

http.createServer(function (req, res) {
    fs.createReadStream('path/to/file').pipe(res);
}).listen(3000);
If end wasn't called on the response object, the request would time out.
This would make the request time out:
fs.createReadStream('path/to/file').pipe(res, {end: false});
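The {end: false} option exists for when you want to keep writing to the destination after the source finishes, in which case you must end it yourself. A small sketch of that pattern; the trailer text is just an illustration:
var http = require('http'),
    fs = require('fs');

http.createServer(function (req, res) {
    var src = fs.createReadStream('path/to/file');
    // Keep res open after the file ends so more can be written
    src.pipe(res, { end: false });
    src.on('end', function () {
        res.end('-- end of file --');
    });
}).listen(3000);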

Creating a server listening on a `net` stream and replying using Node.js

Can someone give me a working example of how to create a server that listens on the stream and replies when a request comes through?
Here's what I have so far:
var port = 4567,
    net = require('net');

var sockets = [];

function receiveData(socket, data) {
    console.log(data);
    var dataToReplyWith = ' ... ';
    // ... HERE I need to reply somehow with something to the client that sent the initial data
}

function closeSocket(socket) {
    var i = sockets.indexOf(socket);
    if (i != -1) {
        sockets.splice(i, 1);
    }
}

var server = net.createServer(function (socket) {
    console.log('Connection ... ');
    sockets.push(socket);
    socket.setEncoding('utf8');
    socket.on('data', function (data) {
        receiveData(socket, data);
    });
    socket.on('end', function () {
        closeSocket(socket);
    });
}).listen(port);
Will socket.write(dataToReplyWith); do it?
Yes, you can just write to the socket whenever you like (as long as the socket is still writable, of course). However, the problem you may run into is that one data event does not necessarily imply one complete "request." Since TCP is just a stream, you can get any amount of data, and it may not align with your protocol's message boundaries: one data event could carry half a "request", or several "requests" at once.
If this is your own custom client/server design and you do not already have some sort of protocol in place to determine "request"/"response" boundaries, you should incorporate that now; a minimal framing sketch follows below.
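For example, a common minimal protocol is newline-delimited messages: buffer incoming data and only treat complete lines as requests. A sketch of that idea against the server above; the echo-style reply is a placeholder, not part of the original code:
var net = require('net');

var server = net.createServer(function (socket) {
    socket.setEncoding('utf8');
    var buffered = '';
    socket.on('data', function (data) {
        buffered += data;
        // There may be zero, one, or several complete requests buffered
        var boundary = buffered.indexOf('\n');
        while (boundary !== -1) {
            var request = buffered.slice(0, boundary);
            buffered = buffered.slice(boundary + 1);
            // Reply to the client that sent this request
            socket.write('You sent: ' + request + '\n');
            boundary = buffered.indexOf('\n');
        }
    });
}).listen(4567);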
