Node.js Net Sockets: Can't connect to multiple IPs

I want to read out data from 2 different IP addresses, but I always get an "EALREADY" error, which means the socket is "already in use", even though I'm destroying the socket after I receive the data.
If I remove the for loop and read out only 1 address, it works just fine...
If I add timeouts, they won't fire... and I don't know why either.
var socket = new net.Socket();
var test = [];
test[0] = '10.5.0.5';
test[1] = '10.5.0.11';
for (var i = 0; i < test.length; i++) {
    socket.connect('2711', test[i], function() {
        socket.write('d\n');
        console.log("1");
        console.log(test[i]);
        socket.on('data', function(data) {
            data = data.toString();
            console.log(data);
            socket.destroy();
            console.log("2");
        });
    });
}
Help highly appreciated.
Thanks in advance.

The "For loop" is synchronous but ".connect" and ".on('data'" are asynchronous, therefore the for loop finished before the first connection happens. The second connection attempt happens when first one is still in progress, that is why you get connection error.
You should try a recursive function, advancing step by step. For example I create a function that receives an array, and iterate the first element of array, connect, and on data close it and call again the function if array still has more items.
var net = require('net');

var test = [];
test[0] = '10.5.0.5';
test[1] = '10.5.0.11';

function testIP(test) {
    // create a fresh socket per connection; a destroyed socket should not be
    // reused, and this also avoids stacking up 'data' listeners between calls
    var socket = new net.Socket();
    socket.connect(2711, test.shift(), function() {
        socket.write('d\n');
        console.log("1");
        socket.on('data', function(data) {
            data = data.toString();
            console.log(data);
            socket.destroy();
            console.log("2");
            if (test.length > 0) {
                testIP(test);
            }
        });
    });
}
testIP(test);
Bonus: there are libraries in Node.js for working with asynchronous tasks, such as async.
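For example, here is a minimal sketch of the same readout using the async library's eachSeries, which only starts the next connection after the previous one has finished (assuming npm install async; the port and the 'd\n' command are taken from the question):

var async = require('async');
var net = require('net');

var ips = ['10.5.0.5', '10.5.0.11'];

// process one IP at a time; eachSeries only starts the next
// iteration after done() is called for the current one
async.eachSeries(ips, function(ip, done) {
    var socket = new net.Socket();
    socket.connect(2711, ip, function() {
        socket.write('d\n');
    });
    socket.on('data', function(data) {
        console.log(ip + ': ' + data.toString());
        socket.destroy();
        done();
    });
    socket.on('error', done); // abort the series on a connection error
}, function(err) {
    if (err) console.log('a connection failed: ' + err);
});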

Related

How to disconnect a socket after streaming data?

I am making use of socket.io-client and socket.io-stream to make a request and then stream some data. I have the following code that handles this logic.
Client Server Logic
router.get('/writeData', function(req, res) {
    var io = req.app.get('socketio');
    var nameNodeSocket = io.connect(NAMENODE_ADDRESS, { reconnect: true });
    var nameNodeData = {};

    async.waterfall([
        checkForDataNodes,
        readFileFromS3
    ], function(err, result) {
        if (err !== null) {
            res.json(err);
        } else {
            res.json("Finished Writing to DN's");
        }
    });

    function checkForDataNodes(cb) {
        nameNodeSocket.on('nameNodeData', function(data) {
            nameNodeData = data;
            console.log(nameNodeData);
            cb(null, nameNodeData);
        });
        if (nameNodeData.numDataNodes === 0) {
            cb("No datanodes found");
        }
    }

    function readFileFromS3(nameNodeData, cb) {
        for (var i in nameNodeData['blockToDataNodes']) {
            var IP = nameNodeData['blockToDataNodes'][i]['ipValue'];
            var dataNodeSocket = io.connect('http://' + IP + ":5000");
            var ss = require("socket.io-stream");
            var stream = ss.createStream();
            var byteStartRange = nameNodeData['blockToDataNodes'][i]['byteStart'];
            var byteStopRange = nameNodeData['blockToDataNodes'][i]['byteStop'];
            paramsWithRange['Range'] = "bytes=" + byteStartRange.toString() + "-" + byteStopRange.toString();
            //var file = require('fs').createWriteStream('testFile' + i + '.txt');
            var getFileName = nameNodeData['blockToDataNodes'][i]['key'].split('/');
            var fileData = {
                'mainFile': paramsWithRange['Key'].split('/')[1],
                'blockName': getFileName[1]
            };
            ss(dataNodeSocket).emit('sendData', stream, fileData);
            s3.getObject(paramsWithRange).createReadStream().pipe(stream);
            //dataNodeSocket.disconnect();
        }
        cb(null);
    }
});
Server Logic (that gets the data)
var dataNodeIO = require('socket.io')(server);
var ss = require("socket.io-stream");

dataNodeIO.on('connection', function(socket) {
    console.log("Successfully connected!");
    ss(socket).on('sendData', function(stream, data) {
        var IP = data['ipValue'];
        var blockName = data['blockName'];
        var mainFile = data['mainFile'];
        dataNode.makeDir(mainFile);
        dataNode.addToReport(mainFile, blockName);
        stream.pipe(fs.createWriteStream(mainFile + '/' + blockName));
    });
});
How can I properly disconnect the connections in readFileFromS3? I have noticed that calling dataNodeSocket.disconnect() at the end does not work, as I then cannot verify the data was received on the second server. But if I comment it out, I can see the data being streamed to the second server.
My objective is to close the connections on the client server side.
It appears that the main problem with closing the socket is that you weren't waiting for the stream to finish writing before trying to close the socket. Because the writing is all asynchronous and finishes some time later, you were closing the socket before the data had been written.
Also, because you were putting asynchronous operations inside a for loop, you were running all your operations in parallel, which may not be what you want: it makes error handling more difficult and puts more load on the server.
Here's the code I would suggest. It does the following:
1. Creates a function streamFileFromS3() that streams a single file and returns a promise that notifies when it's done.
2. Uses await in a for loop with streamFileFromS3() to serialize the operations. You don't have to serialize them, but then you would have to change your error handling to decide what to do when one stream errors while the others are still running, and you'd have to be more careful about concurrency.
3. Uses try/catch to catch any errors from streamFileFromS3().
4. Adds error handling on the stream.
5. Changes all occurrences of data['propertyName'] to data.propertyName. You only need bracket notation when the property name contains a character that is not allowed in a JavaScript identifier, or when the property name is in a variable; otherwise dot notation is preferred.
6. Adds socket.io connection error handling for both socket.io connections.
7. Sets the returned status to 500 when there's an error processing the request.
So, here's the code for that:
const ss = require("socket.io-stream");

router.get('/writeData', function(req, res) {
    const io = req.app.get('socketio');

    function streamFileFromS3(ip, data) {
        return new Promise((resolve, reject) => {
            const dataNodeSocket = io.connect(`http://${ip}:5000`);
            dataNodeSocket.on('connect_error', reject);
            dataNodeSocket.on('connect_timeout', () => {
                reject(new Error(`timeout connecting to http://${ip}:5000`));
            });
            dataNodeSocket.on('connect', () => {
                // dataNodeSocket connected now
                const stream = ss.createStream().on('error', reject);
                paramsWithRange.Range = `bytes=${data.byteStart}-${data.byteStop}`;
                const filename = data.key.split('/')[1];
                const fileData = {
                    mainFile: paramsWithRange.Key.split('/')[1],
                    blockName: filename
                };
                ss(dataNodeSocket).emit('sendData', stream, fileData);
                // get S3 data and pipe it to the socket.io stream
                s3.getObject(paramsWithRange).createReadStream().on('error', reject).pipe(stream);
                stream.on('close', () => {
                    dataNodeSocket.disconnect();
                    resolve();
                });
            });
        });
    }

    function connectError(msg) {
        res.status(500).send(`Error connecting to ${NAMENODE_ADDRESS}`);
    }

    const nameNodeSocket = io.connect(NAMENODE_ADDRESS, { reconnect: true });
    nameNodeSocket.on('connect_error', connectError).on('connect_timeout', connectError);
    nameNodeSocket.on('nameNodeData', async (nameNodeData) => {
        try {
            for (let item of nameNodeData.blockToDataNodes) {
                await streamFileFromS3(item.ipValue, item);
            }
            res.json("Finished Writing to DN's");
        } catch (e) {
            res.status(500).json(e);
        }
    });
});
Other notes:
I don't know what paramsWithRange is, as it is not declared here. When everything ran in parallel, it was shared among all the connections, which is asking for a concurrency problem. In this serialized implementation it is probably safe to share, but the way it stands now bothers me: it's a concurrency issue waiting to happen.

Simple publishing of many messages to Rabbitmq

I want to send the same message many times in a row, but I need to use a loop. When I use a loop, though, no messages are sent. I am using amqp in Node.js.
Here is the working code for sending a single message. What should I do to send many? I have already tried wrapping a while loop around the connection.publish part, and nothing was sent.
var amqp = require('amqp');
var connection = amqp.createConnection({url: "amqp://tester:tstpsswrd@10.4.52.115:5672"});

connection.on('ready', function () {
    connection.queue('my-queue', function (q) {
        connection.publish('my-queue', 'hi');
    });
});
I'm positive that I'm doing something stupid here, or maybe missing something; it's my first time with RabbitMQ.
Update: loop example
var amqp = require('amqp');
var connection = amqp.createConnection({url: "amqp://tester:tstpsswrd@10.4.52.115:5672"});

connection.on('ready', function () {
    connection.queue('my-queue', function (q) {
        while (true) {
            connection.publish('my-queue', 'hi');
        }
    });
});
In a practical scenario you cannot, and should not, have an infinite loop writing to a message broker. Publishing has to be driven by events or bounded by a defined count; a tight synchronous loop never yields back to the event loop, so the driver never gets a chance to flush anything onto the socket.
Try this code; you can adjust the for loop to your requirements:
var amqp = require('amqp');
var connection = amqp.createConnection({ host: 'localhost', port: 5672 });

connection.on('ready', function () {
    for (var i = 0; i < 1000; i++) {
        writeOnQueue("testing the queue " + i);
    }
});

function writeOnQueue(xml) {
    var msg = xml;
    console.log(msg);
    try {
        // note: this re-opens the exchange on every call; in real code you
        // would create the exchange once and reuse it
        connection.exchange('test-exchange', { confirm: true }, function (exchange) {
            exchange.publish('my-queue', msg, { mandatory: false });
            console.log('sent the message successfully on test-exchange');
        });
    } catch (e) {
        console.log('Some error occurred: ' + e);
    }
}
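Since the exchange is created with { confirm: true }, node-amqp also accepts a callback on publish() that fires once the broker acknowledges the message. A rough sketch of using that to verify delivery (the exact callback signature has varied between node-amqp versions, so check the docs for yours):

connection.exchange('test-exchange', { confirm: true }, function (exchange) {
    exchange.publish('my-queue', 'hi', { mandatory: false }, function (errored) {
        // in confirm mode the callback receives a flag that is true on error
        if (errored) console.log('broker did not confirm the message');
        else console.log('message confirmed by broker');
    });
});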

NodeJs net.Socket() events not firing

Trying to write a TCP client in Node v0.10.15, and I'm having a little trouble getting data back from the server. I know the server works properly because I have 3-4 clients written in other languages communicating with it.
Below is a snippet of a larger piece of code, but it should get the point across.
The problem: I'm expecting 2 packets coming back after writing to the socket (that part is not included in this example), but I only see the 'data' event fire once. Is there something I need to do to get Node to resume reading from the TCP stream? I can confirm that the server sends 2 packets (the length, then the actual data). Any help would be appreciated.
var dc = require('./DataContracts.js');
var net = require('net');
require('buffertools').extend();

var client = net.Socket();
var isConnected = false;
var serverHost = '10.2.2.21';
var dataCallback;
var receivedBuffer = new Array();

function InitComm(buffer) {
    if (!isConnected) {
        client.connect(4987, serverHost, function() {
            client.on('data', function(data) {
                console.log('Received server packet...');
                var buf = new Buffer(data);
                receivedBuffer.push(buf);
                client.resume();
            });
            client.on('end', function() {
                if (receivedBuffer.length > 1) {
                    if (dataCallback !== undefined)
                        dataCallback(receivedBuffer);
                }
            });
            client.on('close', function() {
                // clean up
            });
            client.on('error', function(err) {
                console.log('Error!: ' + err);
            });
            Communicate(buffer);
        });
    } else {
        Communicate(buffer);
    }
}
Turns out that Node was combining both packets into a single 'data' event. TCP is a stream, so there is no guarantee that one write() on the server arrives as one 'data' event on the client; the receiver has to split messages itself, for example on a delimiter or a length prefix. I must be missing a carriage return on the first packet.
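For reference, here is a minimal sketch of length-prefix framing on the receiving side, assuming the server sends a 4-byte big-endian length header before each payload (that header format is an assumption; match it to whatever your server actually sends):

var pending = new Buffer(0); // Buffer.alloc(0) on modern Node

client.on('data', function(chunk) {
    pending = Buffer.concat([pending, chunk]);
    // extract every complete message currently buffered
    while (pending.length >= 4) {
        var msgLen = pending.readUInt32BE(0);
        if (pending.length < 4 + msgLen) break; // wait for the rest of the message
        var message = pending.slice(4, 4 + msgLen);
        pending = pending.slice(4 + msgLen);
        receivedBuffer.push(message);
    }
});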

Reading CSV file and sending data in intervals with websockets (Node, Socket.io)

I'm relatively new to Node and Express.js. I'm trying to create a websocket server that pushes CSV data line after line, in the irregular intervals stored in the file itself.
The CSV structure is something like this:
[timeout [ms], data1, data2, data3 ...]
I've successfully created a websocket server which communicates with the client.
I'm looking for the best way to do something like this:
1. Read a line of the CSV file
2. Send a line with WebSockets
3. Pause the reading for a period of time stored in the first value of the row
4. Resume the reading after the interval has passed, and back to step 1.
So far I've got this (please feel free to trash my code completely, as it might be very wrong; as I said, I'm new to this). It seems like pause() doesn't do anything.
var $ = require('jquery'),
    csv = require('csv');

exports.index = function(server) {
    var io = require('socket.io').listen(server);

    io.sockets.on('connection', function (socket) {
        socket.on('startTransmission', function(msg) {
            csv()
                .from.path('C:/dev/node_express/csv/test.csv', { delimiter: ',', escape: '"' })
                .on('record', function(row, index) {
                    var rowArray = $.parseJSON(JSON.stringify(row));
                    var json = {},
                        that = this;
                    $.each(rowArray, function(i, value) {
                        json[keys[i]] = value;
                    });
                    socket.emit('transmitDataData', json);
                    //this.pause(); //I guess around here is where I'd like to pause
                    //setTimeout(function(){
                    //    that.resume(); //and resume here after the timeout, stored in the first value (rowArray[0])
                    //}, rowArray[0]);
                });
        });
    });
};
The commented-out code unfortunately does not work: all data is sent immediately, row after row, and the function doesn't pause.
I ran into the same sort of thing with another use case. The issue is that calling pause() on the stream pauses the underlying stream reading, but not the CSV record parsing, so the 'record' event can still fire with the remainder of the records that made up the last chunk read. In my case I synchronized them like this:
var rows = 0, actions = 0;

stream.on('record', function(row, index) {
    rows++;
    // pause here, but expect more 'record' events until the raw read stream is exhausted
    stream.pause();
    // runner.do() stands in for whatever async work is done per row
    runner.do(row, function(err, result) {
        // when actions have caught up to rows read, read more rows
        actions++;
        if (actions == rows) {
            stream.resume();
        }
    });
});
In your case, I'd buffer the rows and release them with a timer. Here's an untested refactoring just to give you an idea of what I mean:
var $ = require('jquery'),
    csv = require('csv');

exports.index = function(server) {
    var io = require('socket.io').listen(server);

    io.sockets.on('connection', function (socket) {
        socket.on('startTransmission', function(msg) {
            var timer = null,
                buffered = [],
                stream = csv().from.path('C:/dev/node_express/csv/test.csv', { delimiter: ',', escape: '"' });

            function transmit(row) {
                socket.emit('transmitDataData', row);
            }

            function drain(timeout) {
                if (!timer) {
                    timer = setTimeout(function() {
                        timer = null;
                        if (buffered.length <= 1) { // read rows ahead of time so we don't run out; otherwise we could skip a beat
                            stream.resume(); // get more rows
                        } else {
                            var row = buffered.shift();
                            transmit(row);
                            drain(row[0]);
                        }
                    }, timeout);
                }
            }

            stream.on('record', function(row, index) {
                stream.pause();
                if (index == 0) {
                    transmit(row);
                } else {
                    buffered.push(row);
                }
                drain(row[0]); // assuming row[0] contains a timeout value
            });

            stream.on('end', function() {
                // no more rows; wait for the buffer to empty, then clean up
            });

            stream.on('error', function() {
                // handle error
            });
        });
    });
};

How will Node.JS process loop control while there's async function called in the loop?

My situation is as follows: there's an array of IP addresses, and I test each one by connecting to a remote server. As soon as one IP connects, the remaining IPs are ignored and not connected.
I used the following Node.js code to do the work, but it doesn't seem to work. Please give some hints. Thanks!
// serverip is a string of IPs separated by ";", e.g. "ip1;ip2;ip3"
var aryServerIP = serverip.split(";");
console.log(aryServerIP);

var ipcnt = aryServerIP.length; // ipcnt = 3, for example
for (ip in aryServerIP) {
    console.log("to process: " + ipcnt); // error here: always prints 3
    var net = require('net');
    var client = new net.Socket();
    var rdpport = 3389;
    client.connect(rdpport, aryServerIP[ip], function() {
        console.log("socket connected to " + aryServerIP[ip] + ":" + rdpport);
        client.destroy();
        if (0 != ipcnt) {
            // do some real connection work with aryServerIP[ip].
            ipcnt--;
        }
    });
    client.on('error', function() {
        console.log("fail to connect to " + aryServerIP[ip] + ":" + rdpport);
        ipcnt--;
    });
}
I know using the ipcnt counter to control the loop is bad, but I don't know how else to control a Node.js loop when an asynchronous function is called inside it...
Your connect and error callbacks are both asynchronous, so they both run after the for loop has completely finished.
What you need to do is chain the attempts with callbacks. Rather than using a for loop, wrap the loop body in a function. If connect succeeds, do what you normally would; if the error callback fires, call the wrapping function again with the next index. Something like this:
var async = require('async'); // only needed for the second solution below
var net = require('net');
var rdpport = 3389;

function tryConnections(aryServerIP, callback) {
    function connect(i) {
        if (i === aryServerIP.length) return callback(); // ran out of IPs; all failed
        var client = new net.Socket();
        client.connect(rdpport, aryServerIP[i], function() {
            callback(client);
        });
        client.on('error', function() {
            connect(i + 1); // try the next IP
        });
    }
    connect(0);
}
tryConnections(serverip.split(";"), function(client) {
    if (client) {
        // successfully connected to something
    } else {
        // all IPs failed
    }
});
Another solution would be to use the Async library.
function tryConnections(aryServerIP, callback) {
    // detectSeries tries each IP in order and passes the first one whose
    // test succeeds to the final callback (the IP string, not the socket)
    async.detectSeries(aryServerIP, function(ip, cb) {
        var client = new net.Socket();
        client.connect(rdpport, ip, function() {
            client.destroy();
            cb(true);
        });
        client.on('error', function() {
            cb(false);
        });
    }, callback);
}
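Note that detectSeries passes the winning item itself to the final callback, so the caller receives the reachable IP string (or undefined if none connected) rather than a live socket. A hypothetical call site, mirroring the first snippet's usage:

tryConnections(serverip.split(";"), function(ip) {
    if (ip) {
        console.log("first reachable server: " + ip);
    } else {
        console.log("all IPs failed");
    }
});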
