Event handling not working as expected in Node.js

I'm writing a little domain search app. It should sequentially search the .com of each item in an array, but it keeps searching for test1. Even if I do a console.log within the search function, it tells me the value of x is test2 and test3. Do I need to remove the listener or something?
I get the following output
domain test1.com
Domain Name: TEST1.COM
domain test2.com
Domain Name: TEST1.COM
domain test3.com
Domain Name: TEST1.COM
app.js
var port = 43;
var net = require('net');
var host = 'whois.internic.net';
var dotCom = new net.Socket();
var c = 0;
var connections = 0;
var dotComStatus;
dotCom.setEncoding('ascii');
var searches = ['test1', 'test2', 'test3'];

search(searches.shift());

function chkconnections(z) {
  if (connections <= 0) {
    if (searches.length >= 1) {
      process.nextTick(function() {
        search(searches.shift());
      });
    }
  }
}

function search(x) {
  var q = "domain " + x + ".com\r\n";
  dotCom.connect(port, host, function() {
    dotCom.write(q);
    console.log(q);
    connections++;
  });
  dotCom.on('data', function(data) {
    c++;
    if (c == 2) {
      dotComStatus = data.split('\n')[1];
      dotCom.on('close', function() {
        console.log(dotComStatus);
        connections--;
        chkconnections();
      });
    }
  });
}

There are several obvious problems with this code. First, registering the close handler inside the data handler is a bad idea: if the connection closed before any data was received, that section of code would never be reached.
Next, there is a big problem with this section:
c++;
if (c == 2)
Since you never reset c to 0, the line dotComStatus = data.split('\n')[1]; executes only during the first search; on later searches c has already passed 2, so it never runs again. But each time the socket closes, the close event still fires, and this runs again:
console.log(dotComStatus);
connections--;
chkconnections();
So dotComStatus still holds the value from the first search, which is why TEST1.COM is printed every time. There are many examples of the connect/data/end flow that is common in Node.js; here is one:
var port = 43;
var net = require('net');
var host = 'whois.internic.net';
var searches = ['test1', 'test2', 'test3'];

search(searches.shift());

function chkconnections() {
  if (searches.length > 0)
    search(searches.shift());
}

function search(x) {
  var dotCom = new net.Socket();
  dotCom.setEncoding('ascii');
  var q = "domain " + x + ".com\r\n";
  dotCom.connect(port, host, function() {
    dotCom.write(q);
  });
  var data = ""; // holding place until socket closes
  dotCom.on('data', function(chunk) {
    data += chunk; // add chunk to data
  });
  dotCom.on("end", function() {
    // socket closed
    var dotComStatus = data.split('\n')[7]; // should be 'Domain Name: blah'
    console.log(dotComStatus);
    chkconnections(); // move on to the next search
  });
}

Related

Socket.io fs.readFileSync for many connections at the same time

This is not really a question, but I'd like to know if what I did is correct, because it's working!
So, to the question: I'm monitoring many interfaces (PPPoE clients) at the same time, reading their traffic statistics from Linux.
I'm using the npm packages express, socket.io and socket.io-stream.
Client:
var sessionsAccel = $('table.accel').DataTable([]);
sessionsAccel.on('preDraw', function() {
  $('.interfaceAccel').each(function(i) {
    var t = $(this).data();
    sockets['socket' + t.id].disconnect();
    delete speeds['tx_bytes' + t.id];
    delete speeds['rx_bytes' + t.id];
  });
})
.on('draw', function() {
  $('.interfaceAccel').each(function(i) {
    var t = $(this).data();
    sockets['socket' + t.id] = io.connect('http://172.16.101.2:3000/status', {
      query: 'interface=' + t.interface,
      'forceNew': true
    });
    sockets['socket' + t.id].on("connect", function() {
      ss(sockets['socket' + t.id]).on('sendStatus', function(stream, data) {
        if (typeof speeds['tx_bytes' + t.id] != 'undefined') {
          var speedtx = (data.tx_bytes - speeds['tx_bytes' + t.id]) * 8 / 1000;
          var speedrx = (data.rx_bytes - speeds['rx_bytes' + t.id]) * 8 / 1000;
          if (speedtx > 1000) {
            speedtx = speedtx / 1000;
            speedtx = speedtx.toFixed(2);
            speedtx_info = speedtx + ' Mbps';
          } else {
            speedtx = speedtx.toFixed(2);
            speedtx_info = speedtx + ' kbps';
          }
          if (speedrx > 1000) {
            speedrx = speedrx / 1000;
            speedrx = speedrx.toFixed(2);
            speedrx_info = speedrx + ' Mbps';
          } else {
            speedrx = speedrx.toFixed(2);
            speedrx_info = speedrx + ' kbps';
          }
          $('.tx_' + t.id).html(speedtx_info);
          $('.rx_' + t.id).html(speedrx_info);
        }
        speeds['tx_bytes' + t.id] = data.tx_bytes;
        speeds['rx_bytes' + t.id] = data.rx_bytes;
      });
    });
  });
})
Server:
const app = require('express')();
const http = require('http').createServer(app);
const io = require('socket.io')(http);
const ss = require('socket.io-stream');
const path = require('path');
const fs = require('fs');

function getIntInfo(interface) {
  if (fs.existsSync('/sys/class/net/' + interface + '/statistics/tx_bytes')) {
    var tx_bytes = fs.readFileSync('/sys/class/net/' + interface + '/statistics/tx_bytes').toString();
    var rx_bytes = fs.readFileSync('/sys/class/net/' + interface + '/statistics/rx_bytes').toString();
    var tx_packets = fs.readFileSync('/sys/class/net/' + interface + '/statistics/tx_packets').toString();
    var rx_packets = fs.readFileSync('/sys/class/net/' + interface + '/statistics/rx_packets').toString();
    return {tx_bytes: tx_bytes, rx_bytes: rx_bytes, tx_packets: tx_packets, rx_packets: rx_packets};
  } else
    return false;
}

io.of('/status').on('connection', function(socket) {
  var query = socket.handshake.query['interface'];
  var timer = setInterval(function() {
    var stream = ss.createStream();
    var info = getIntInfo(query);
    ss(socket).emit('sendStatus', stream, info);
  }, 1000);
  socket.on('disconnect', function() {
    socket.disconnect(true);
    //console.info('disconnected user (id=' + socket.id + ').');
  });
})

http.listen(3000, function() {
  console.log('listening on *:3000');
});
That's it: every row in the DataTable (each one an interface) opens a socket connection and retrieves the statistics.
My question is: will all this I/O reading these files mess up my server?
Since you're doing this every second for every connected client, you should probably cache this data so it doesn't have to be read from disk or sent over the wire when it hasn't changed, saving both server load and bandwidth. But the details of how best to do that depend on knowledge about your particular application that you haven't included.
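As a rough sketch of the caching idea (emitIfChanged and lastSent are hypothetical names, and it assumes the info object shape returned by getIntInfo() below), you could remember the last payload per interface and skip the emit when nothing changed:
var lastSent = {}; // hypothetical cache of the last payload, keyed by interface name

function emitIfChanged(socket, iface, info) {
  var payload = JSON.stringify(info);
  if (lastSent[iface] === payload) return; // unchanged: skip the wire entirely
  lastSent[iface] = payload;
  socket.emit('sendStatus', info);
}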
You can at least use asynchronous I/O like this:
const util = require('util');
const fs = require('fs');
const readFile = util.promisify(fs.readFile);

function getIntInfo(interface) {
  function readInfo(name) {
    return readFile('/sys/class/net/' + interface + '/statistics/' + name).then(data => data.toString());
  }
  return Promise.all([
    readInfo('tx_bytes'),
    readInfo('rx_bytes'),
    readInfo('tx_packets'),
    readInfo('rx_packets')
  ]).then(([tx_bytes, rx_bytes, tx_packets, rx_packets]) => {
    return {tx_bytes, rx_bytes, tx_packets, rx_packets};
  }).catch(err => {
    console.log(err);
    return false;
  });
}
And, you have to stop the interval any time a client disconnects and change how it calls getIntInfo():
io.of('/status').on('connection', function(socket) {
  var query = socket.handshake.query['interface'];
  var timer = setInterval(function() {
    getIntInfo(query).then(info => {
      var stream = ss.createStream();
      ss(socket).emit('sendStatus', stream, info);
    });
  }, 1000);
  socket.on('disconnect', function() {
    // stop the timer for this connection
    clearInterval(timer);
  });
});
Now that I think about it a bit more, you could improve scalability quite a bit by having just one interval timer that reads the data and then sends that one set of data to all listening clients connected to the /status namespace. You would reduce the file reading from once per second for every client to just once per second, no matter how many clients there are.
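A minimal sketch of that single-timer idea, assuming the promisified getIntInfo() above; it broadcasts with a plain socket.io emit rather than socket.io-stream for simplicity, and the fixed interface name is a placeholder (the real code reads it from the handshake query):
var IFACE = 'ppp0'; // hypothetical interface name

setInterval(function() {
  getIntInfo(IFACE).then(function(info) {
    // one set of file reads per second, shared by every connected client
    io.of('/status').emit('sendStatus', info);
  });
}, 1000);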

Having issues with BitTorrent Protocol

I'm trying to make just a simple BitTorrent tracker for a school project. It's totally hacked together right now, but I can't find where I'm going wrong. I'm wondering if I have a misunderstanding of what the server response should be. I am using Node.js and Express.
The server receives the GET requests with the ?info_hash data no problem, and I'm able to save that info into JSON files. The server is also able to respond to the clients using bencoding. The response is a dictionary which has an interval and a peers list. Inside the list are several dictionaries, and each dictionary holds the ip and port of a peer.
Currently, though, none of the peers will connect to each other. I'll have my laptop on a separate network from my desktop, and it will see the desktop as a potential peer with the correct ip and port (as far as I know), but after a moment it drops off the peer list. I am using Deluge and qBitTorrent on each client.
Here's the code for the app:
var express = require('express');
var app = express();
var fs = require("fs");
var contents = fs.readFileSync("data.json");
var data = JSON.parse(contents);

var findTorrent = function(data, hash) {
  for (var i = 0; i < data.length; i++) {
    if (data[i].info_hash === hash) {
      return data[i];
    }
  }
  return false;
}

var findID = function(data, qPort, qip) {
  for (var i = 0; i < data.length; i++) {
    //console.log(data[i].peer_id);
    if (data[i].port == qPort && data[i].ip === qip) {
      return true;
    }
  }
  return false;
}

var findHash = function(data, id) {
  for (var i = 0; i < data.length; i++) {
    if (data[i].peer_id === id) {
      return data[i];
    }
  }
  return false;
}

function hashy(str) {
  var url = str;
  var hexval = '';
  for (var i = 0; i < url.length; i++) {
    if (url[i] !== '%') {
      var code = url.charCodeAt(i);
      var hex = code.toString(16);
      hexval += hex;
    } else {
      hexval += url[i + 1] + url[i + 2];
      i += 2;
    }
  }
  return hexval;
}

app.get('/', function(req, res) {
  console.log(req.query);
  var info_hash = hashy(req.query.info_hash);
  console.log(info_hash);
  var peer_id = decodeURIComponent(req.query.peer_id);
  var escaped = escape(req.query.peer_id);
  console.log('escaped ' + escaped);
  console.log('decoded ' + peer_id);
  console.log('normal ' + req.query.peer_id);
  var ip = req.connection.remoteAddress;
  if (ip.substring(0, 7) == '::ffff:') {
    ip = ip.substring(7);
  }
  //var port = req.connection.remotePort;
  var port = req.query.port;
  console.log(ip);
  var torrent = findTorrent(data, info_hash);
  var completed;
  if (torrent === false) {
    if (req.query.left === '0') {
      completed = true;
    } else {
      completed = false;
    }
    var obj = { "info_hash": info_hash, "peers": [{ "peer_id": peer_id, "ip": ip, "port": port, "completed": completed }] };
    data.push(obj);
    torrent = obj;
    //console.log(obj.peers);
  } else {
    //figure out if completed
    if (req.query.left == '0') {
      completed = true;
    } else {
      completed = false;
    }
    var peer = findHash(torrent.peers, peer_id);
    if (peer === false) {
      var obj = { "peer_id": peer_id, "ip": ip, "port": port, "completed": completed };
      torrent.peers.push(obj);
    } else {
      peer.ip = ip;
      peer.port = port;
      peer.completed = completed;
    }
  }
  if (torrent) {
    var response = bencode(torrent);
  } else {
    response = 'error';
  }
  //console.log(data);
  fs.writeFileSync("data.json", JSON.stringify(data, null, 2), 'utf-8');
  res.send(response);
});

var bencode = function(torrent) {
  var response = 'd8:intervali600e12:min intervali30e'
  var complete = 0;
  var incomplete = 0;
  for (var i = 0; i < torrent.peers.length; i++) {
    if (torrent.peers[i].completed === true) {
      complete++;
    } else {
      incomplete++;
    }
  }
  response = response.concat('8:completei' + complete + 'e');
  response = response.concat('10:incompletei' + incomplete + 'e5:peersl');
  for (var i = 0; i < torrent.peers.length; i++) {
    response = response.concat('d');
    response = response.concat('2:ip');
    response = response.concat(torrent.peers[i].ip.length + ':');
    response = response.concat(torrent.peers[i].ip);
    //response = response.concat('7:peer id');
    //response = response.concat(torrent.peers[i].peer_id.length + ':');
    //response = response.concat(torrent.peers[i].peer_id);
    response = response.concat('4:port');
    response = response.concat('i' + torrent.peers[i].port + 'e');
    response = response.concat('e');
  }
  response = response.concat('ee');
  console.log(response);
  return response;
}

app.listen(4000, function() {
  console.log('Example app listening on port 4000!');
});
I'm able to connect to the tracker hosted on Amazon AWS, and qBitTorrent reports it as "working". I can also see the GET request going out and the server response coming in via Wireshark. The response contains the following bencoded string, which I believe is all that's necessary:
d8:intervali600e12:min intervali30e8:completei2e10:incompletei3e5:peersld2:ip13:73.66.138.2174:porti8999eed2:ip13:73.66.138.2174:porti63014eed2:ip13:73.66.138.2174:porti8999eed2:ip13:73.25.106.1804:porti6881eed2:ip13:73.66.249.1414:porti8999eeee
According to www.bittorrent.org, all that is necessary in the response is an interval and a peers key mapped to a list of peers; each peer needs an id, ip, and port.
Update: I've switched the port to the one that the client reports in the request and made sure that my torrent client has its port forwarded, and it seems to be working now. I'm still going to continue working on this, though; currently I don't have a way to remove peers when they stop seeding/leeching.
Be careful not to have a trailing carriage return in the tracker response. That makes the bencoded response invalid, and some clients don't like it.
This seems to mostly be an issue between the peers and not the tracker. If they are both NATed, at least one of them needs to have the port forwarded through the NAT for them to be able to connect to each other.
The port in the tracker response should be the one that the peer reports in the request.
The bencoded dict in the tracker response is not sorted: the keys complete, incomplete, interval, min interval, peers should appear in sorted order as raw strings. Some clients may have problems if they aren't.
Another thing: the tracker response specified in BEP 3, while still correct, has been obsoleted by the compact=1 response. All modern clients support 'compact'. While I'm not aware of any client that has dropped support for the legacy way, some trackers have.
Bram Cohen has said that "... non-support for the 'compact' extension is considered outright malbehavior today." post #5
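As a rough sketch (not from the original answer), the compact peers value packs each peer into 6 bytes: 4 for the IPv4 address and 2 for the port in network byte order. Assuming peer objects with ip and port fields as in the question's code:
function compactPeers(peers) {
  var buf = Buffer.alloc(peers.length * 6); // 4 bytes IP + 2 bytes port per peer
  peers.forEach(function(peer, i) {
    peer.ip.split('.').forEach(function(octet, j) {
      buf.writeUInt8(parseInt(octet, 10), i * 6 + j);
    });
    buf.writeUInt16BE(Number(peer.port), i * 6 + 4); // big-endian port
  });
  return buf;
}
The result goes into the bencoded dict as a single byte string, with the keys in sorted order, e.g. for two peers: d8:completei2e10:incompletei3e8:intervali600e12:min intervali30e5:peers12:<12 raw bytes>e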
A good resource about the BitTorrent protocol is https://wiki.theory.org/BitTorrentSpecification
This answer is an edited version of what was originally posted as comments.

node.js server not always clearing data from previous run

I'm brand new to Node.js (and JavaScript in general) and not sure what's going on here. Sometimes when I run my Node.js server, right when it starts up it begins printing data from the last time it ran. Is there something that could be keeping everything from resetting on a new run? I have Arduino XBee nodes connecting to this server. Here's my node.js code:
var SerialPort = require("serialport");
var app = require('express')();
var http = require('http').Server(app);
var io = require('socket.io')(http);
var ON_DEATH = require('death');

// Set up serial port
var portName = process.argv[2],
    portConfig = {
      baudRate: 9600,
      parser: SerialPort.parsers.readline("\n")
    };
var sp = new SerialPort(portName, portConfig); // got rid of deprecation issue with sp4

// Respond with file when a GET request is made to the homepage: app.METHOD(PATH, HANDLER) '/'=homepage
app.get('/', function(req, res) {
  res.sendfile('index.html');
});

// Listen on the connection event for incoming sockets, and log it to the console.
io.on('connection', function(socket) {
  console.log('a user connected');
  socket.on('disconnect', function() {
  });
  socket.on('chat message', function(msg) {
    io.emit('chat message', msg);
    sp.write(msg + "\n");
  });
});

// Make the http server listen on port 3000
http.listen(3000, function() {
  console.log('listening on *:3000');
});

/////////////////////////////////////////////////////////////////////////////////////////////////
sp.on("open", function(err) {
  if (err)
    return console.log('Error opening port: ', err.message);
  console.log('open');

  // INFO VARIABLES
  //var nodeCount = 0;
  var pendingNodes = [];
  var connectedNodes = [];
  var buffer0;
  var nodeInfo;

  // Cleanup when termination signals are sent to process
  ON_DEATH(function(signal, err) {
    var death_msg = "Q";
    sp.write(death_msg);
    sp.close();
    // zero out all variables
    pendingNodes = [];
    connectedNodes = [];
    buffer0 = [];
    nodeInfo = [];
    console.log("\n\nSending reset signal to nodes.\n\n")
    sp.flush(function(err, results) {});
    process.exit();
  })

  // listen for data, grab time, delimit
  sp.on('data', function(data) {
    var time = new Date();
    buffer0 = data.split('\n'); // array of data till newline
    // Split again into either "#ID" into [#ID] (if new broadcast), or "#ID Data" into [#ID, Data] (preconnected node)
    nodeInfo = buffer0[0].split(' ');
    //console.log(" nodeInfo: " + nodeInfo);
    // DEBUGGING PRINT -- DELETE LATER
    //console.log(" pendingNodes: " + pendingNodes + " connectedNodes: " + connectedNodes);

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Receiving ID or [ID, Data] -- Testing all possible cases
    // if first char
    if (nodeInfo[0][0] == "#") { // could check up here for && no temp data too
      // Brand new node (ID not in pendingNodes or connectedNodes)
      if ((pendingNodes.indexOf(nodeInfo[0]) == -1) && (connectedNodes.indexOf(nodeInfo[0]) == -1)) {
        console.log("Brand new node: " + nodeInfo[0])
        pendingNodes.push(nodeInfo[0]);
      }
      // Pending node (ID in pendingNodes, but not in connectedNodes)
      else if ((pendingNodes.indexOf(nodeInfo[0]) != -1) && ((connectedNodes.indexOf(nodeInfo[0]) == -1))) {
        console.log(" Pending node: " + nodeInfo[0])
        // send back ID to confirm handshake
        sp.write(nodeInfo[0]);
        // Remove from pendingNodes
        var index = pendingNodes.indexOf(nodeInfo[0]);
        if (index > -1) {
          pendingNodes.splice(index, 1);
        }
        // Add to connectedNodes
        connectedNodes.push(nodeInfo[0]);
      }
      // Connected node (ID in connectedNodes, but not in pendingNodes)
      else if ((pendingNodes.indexOf(nodeInfo[0]) == -1) && (connectedNodes.indexOf(nodeInfo[0]) != -1)) {
        console.log(" Connected node: " + nodeInfo[0] + " " + nodeInfo[1]);
        // Disconnect nodes with undefined temperatures to force restart
        if (typeof nodeInfo[1] == 'undefined') {
          console.log("Undefined data for node: " + nodeInfo[0] + ". Dropping connection to this node.");
          var index = connectedNodes.indexOf(nodeInfo[0]);
          if (index > -1) {
            connectedNodes.splice(index, 1);
          }
        }
        //var t = time.getTime();
        // FIRST: fix bug -- sometimes temp showing as undefined
        // Then:
        //   Update data properly
        //   Average data
        //   add 3rd element to nodes for time stamp, check that all timestamps are recent periodically
        //   Detect drop-offs and remove nodes
      }
      // Error (ID in both pendingNodes and connectedNodes [should not happen])
      else {
        console.log("Error: node exists in both pending and connected.")
      }
    } else if (nodeInfo[0][0] != "#") {
      // FOR DEBUGGING PURPOSES, DELETE LATER
      console.log("Error 2: incoming data does not start with '#'.")
    }

    /* // placeholders for functionality to add
    var CalcAverage = function() {
      console.log("CalcAverage: * every 1 second(s) *");
    }
    setInterval(function() { CalcAverage() }, 2000);
    var CheckNodes = function() {
      console.log("CheckNodes: * every 6 second(s) *");
    }
    setInterval(function() { CheckNodes() }, 6000);
    */
  }); // end data
}); // end open
And here's an example of old node data showing up somehow (screenshot not included).
Some possibilities:
Old data from clients (or new data that you didn't know they sent) has been buffered somehow on the serial port, or socket.io auto-reconnects in the client browsers and starts sending data even though you didn't interact with the client web page.
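If stale serial data is the culprit, one thing to try (a sketch, assuming the same serialport API used above; handleData is a placeholder for your existing 'data' callback) is flushing the port as soon as it opens, before attaching the 'data' handler:
sp.on("open", function() {
  sp.flush(function(err) {
    if (err) return console.log('Error flushing port: ', err.message);
    // only start consuming bytes once any leftovers are discarded
    sp.on('data', handleData);
  });
});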

TCP socket handling for multiplayer game on NodeJS

I have a multiplayer game where my server uses Node.js and a TCP socket (net.createServer) to communicate with the clients.
I have thousands of clients connecting to the server, and many people are complaining that they are constantly being disconnected from the server.
Here is how my server handles the connections now:
Init:
var net = require("net");
server = net.createServer(function(socket) {
  socket.setTimeout(15000);
  socket.setKeepAlive(true);
  socket.myBuffer = "";
  socket.msgsQue = [];
  socket.on("data", onData);
  socket.on("error", onError);
  socket.on("end", onClientDisconnect);
  socket.on("timeout", onClientDisconnect);
});
server.listen(port);
Sending to client:
var stringData = JSON.stringify({name: event, message: data});
if (!this.socket.msgsQue || typeof this.socket.msgsQue == "undefined")
  this.socket.msgsQue = [];
this.socket.msgsQue.unshift(stringData);
var i = this.socket.msgsQue.length;
while (i--) {
  if (this.socket.writable) {
    var elem = this.socket.msgsQue[i];
    this.socket.write(elem + "\0");
    this.socket.msgsQue.splice(i, 1);
  } else {
    // Unable to write at index "i"
    break; // will send the rest on the next attempt
  }
}
When disconnected
var onClientDisconnect = function() {
  this.myBuffer = "";
  delete this.myBuffer;
  this.msgsQue = [];
  delete this.msgsQue;
  this.destroy();
}
Receiving from client
var onData = function(data) {
  if (!data || !data.toString()) return;
  var raw = data.toString();
  this.myBuffer += raw;
  var d_index = this.myBuffer.indexOf('\0'); // Find the delimiter
  // While loop to keep going until no delimiter can be found
  var string;
  while (d_index > -1) {
    // Create string up until the delimiter
    string = this.myBuffer.substring(0, d_index);
    // Cut off the processed chunk
    this.myBuffer = this.myBuffer.substring(d_index + 1);
    onMessage(string, this); // handle
    // Find the new delimiter
    d_index = this.myBuffer.indexOf('\0');
  }
}
A problem I notice is that msgsQue becomes huge because the socket is not writable, but the disconnect handler is not fired (or is fired much later).
Can you give me some advice on how to optimize this?
I noticed that sometimes I get disconnected even though I can still ping the server, so it is definitely a server-related problem. Could it be because of high load on the server itself?
(I do not want to use socket.io because the last year I had many problems with it like memory leaking, freezing the server app, no support, etc..)

Looking for a better way to do real time stock updates

I started with this project for my real-time stock price updates.
It works well when I am working with one or two stocks, but not when I want to update the price of hundreds of stocks at the same time. I'd like to know if I'm doing this the right way. Right now I fetch the data for all the stocks in a for loop on the server, but the price updates are very, very slow. I'd like to know how to improve this.
I'd like to know how to update hundreds of stock prices each second, without affecting server performance.
I don't know if I should be sending the server a list of stocks I need from the client like: var ids = [ '', '', '', ... ], or if I can run those ids from the server itself.
Which is best: Stocks request from client to server, or from server to client?
Note: I will be using a different url to get stock price.
My server side code :
////
// CONFIGURATION SETTINGS
///
var PORT = 4000;
var FETCH_INTERVAL = 5000;
var PRETTY_PRINT_JSON = true;

///
// START OF APPLICATION
///
var express = require('express');
var http = require('http');
var io = require('socket.io');

var app = express();
var server = http.createServer(app);
var io = io.listen(server);
io.set('log level', 1);
server.listen(PORT);

var ticker = "";

app.get('/:ticker', function(req, res) {
  ticker = req.params.ticker;
  res.sendfile(__dirname + '/index.html');
});

io.sockets.on('connection', function(socket) {
  var local_ticker = ticker;
  ticker = "";
  // Run the first time immediately
  get_quote(socket, local_ticker);
  // Every N seconds
  var timer = setInterval(function() {
    var ids = ['AAPL', '' , ..........100 stocks];
    var l = ids.length;
    for (var i = 0; i < l; i++) {
      get_quote(socket, ids[i])
    }
  }, FETCH_INTERVAL);
  socket.on('disconnect', function() {
    clearInterval(timer);
  });
});

function get_quote(p_socket, p_ticker) {
  http.get({
    host: 'www.google.com',
    port: 80,
    path: '/finance/info?client=ig&q=' + p_ticker
  }, function(response) {
    response.setEncoding('utf8');
    var data = "";
    response.on('data', function(chunk) {
      data += chunk;
    });
    response.on('end', function() {
      if (data.length > 0) {
        try {
          var data_object = JSON.parse(data.substring(3));
        } catch (e) {
          return;
        }
        var quote = {};
        quote.ticker = data_object[0].t;
        quote.exchange = data_object[0].e;
        quote.price = data_object[0].l_cur;
        quote.change = data_object[0].c;
        quote.change_percent = data_object[0].cp;
        quote.last_trade_time = data_object[0].lt;
        quote.dividend = data_object[0].div;
        quote.yield = data_object[0].yld;
        p_socket.emit('quote', PRETTY_PRINT_JSON ? JSON.stringify(quote, true, '\t') : JSON.stringify(quote));
      }
    });
  });
}
My client side code :
<script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jquery/1.7.2/jquery.js"></script>
<script type="text/javascript" src="http://localhost:4000/socket.io/socket.io.js"></script>
<script type="text/javascript">
  $(document).ready(function() {
    var socket = io.connect("http://localhost:4000");
    socket.on('quote', function(data) {
      var data = $("<pre>" + data + "</pre><hr />");
      $("#quotes").append(data);
      $("html, body").animate({ scrollTop: $(document).height() }, 100);
      $(data).show("slide", { direction: "up" }, 250);
      $(data).effect("highlight", {}, 1500);
    });
  });
</script>
<body>
  <div id="quotes"></div>
</body>
I think that sending the desired IDs from the client side will make your application more flexible and easier to use. You can still write your server in a way that performs well.
A plain for loop fires off all of the requests at once and gives the event loop no room to breathe between them. For async actions that need to iterate over an array, I recommend:
https://github.com/caolan/async
Specifically async.each (or async.eachLimit to cap concurrency); a sketch follows.
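As a rough sketch of that idea (the getQuote wrapper and the concurrency limit of 10 are assumptions, not part of the original code), async.eachLimit can keep only a bounded number of quote requests in flight at once:
var async = require('async');

// Hypothetical variant of the question's get_quote() that calls cb once the
// response has been fully consumed, so eachLimit can track in-flight requests.
function getQuote(p_socket, p_ticker, cb) {
  http.get({
    host: 'www.google.com',
    path: '/finance/info?client=ig&q=' + p_ticker
  }, function(response) {
    var data = "";
    response.setEncoding('utf8');
    response.on('data', function(chunk) { data += chunk; });
    response.on('end', function() {
      if (data.length > 0) p_socket.emit('quote', data);
      cb();
    });
  }).on('error', cb);
}

function fetchQuotes(socket, ids, done) {
  // at most 10 requests in flight at any moment
  async.eachLimit(ids, 10, function(ticker, cb) {
    getQuote(socket, ticker, cb);
  }, done);
}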
I haven't run your code, but my gut tells me that my browser would not enjoy that much DOM manipulation all at once. I think that breaking the work into smaller groups would help. For instance:
Take your array of IDs, break it into 5 arrays, and stagger the intervals of each:
var arr1 = [...]
var arr2 = [...]
var arr3 = [...]
var arr4 = [...]
var arr5 = [...]

// pass functions to setTimeout/setInterval; calling them inline would run them immediately
setTimeout(function() { doWorkOnArray(arr1) }, 4000)
setTimeout(function() { doWorkOnArray(arr2) }, 3000)
setTimeout(function() { doWorkOnArray(arr3) }, 2000)
setTimeout(function() { doWorkOnArray(arr4) }, 1000)
setTimeout(function() { doWorkOnArray(arr5) }, 0)

function doWorkOnArray(arr) {
  setInterval(function() { getData(arr) }, 5000) // getData: your fetch routine
}
Alternatively, you could look at setting up a master/worker with something like Redis to queue the work. I think this would give the best performance. Check out:
https://github.com/Automattic/kue
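A minimal sketch of that queue idea with kue (the 'fetch-quotes' job type, the batch contents, and the worker concurrency of 2 are all hypothetical):
var kue = require('kue');
var queue = kue.createQueue(); // assumes a local Redis server is running

// producer: enqueue one batch of tickers per interval
setInterval(function() {
  queue.create('fetch-quotes', { ids: ['AAPL', 'GOOG'] }).save();
}, 5000);

// worker: process up to 2 batches at a time; fetch the quotes for
// job.data.ids and emit them over socket.io, then signal completion
queue.process('fetch-quotes', 2, function(job, done) {
  done();
});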
