Looking for a better way to do real-time stock updates - node.js

I started with this project as the basis for my real-time stock price updater.
It works well when I am watching one or two stocks, but not when I want to update the prices of hundreds of stocks at the same time. Right now I fetch the data for all stocks in a for loop on the server, and the price updates are very, very slow. I'd like to know whether I'm doing this the right way, and how to improve it.
In short: how can I update hundreds of stock prices every second without hurting server performance?
I don't know whether the client should send the server the list of stocks it needs, like var ids = [ '', '', '', ... ], or whether I can keep those ids on the server itself.
Which is best: requesting the stocks from client to server, or from server to client?
Note: I will be using a different URL to fetch the stock prices.
My server-side code:
//
// Configuration settings
//
var PORT = 4000;
var FETCH_INTERVAL = 5000;
var PRETTY_PRINT_JSON = true;

//
// Start of application
//
var express = require('express');
var http = require('http');
var socketio = require('socket.io');

var app = express();
var server = http.createServer(app);
var io = socketio.listen(server);

io.set('log level', 1);
server.listen(PORT);

var ticker = "";

app.get('/:ticker', function(req, res) {
  ticker = req.params.ticker;
  res.sendfile(__dirname + '/index.html');
});

io.sockets.on('connection', function(socket) {
  var local_ticker = ticker;
  ticker = "";

  // Run the first time immediately
  get_quote(socket, local_ticker);

  // Every N seconds
  var timer = setInterval(function() {
    var ids = ['AAPL', /* ..........100 stocks */];
    for (var i = 0; i < ids.length; i++) {
      get_quote(socket, ids[i]);
    }
  }, FETCH_INTERVAL);

  socket.on('disconnect', function() {
    clearInterval(timer);
  });
});

function get_quote(p_socket, p_ticker) {
  http.get({
    host: 'www.google.com',
    port: 80,
    path: '/finance/info?client=ig&q=' + p_ticker
  }, function(response) {
    response.setEncoding('utf8');
    var data = "";
    response.on('data', function(chunk) {
      data += chunk;
    });
    response.on('end', function() {
      if (data.length > 0) {
        var data_object;
        try {
          // The feed prefixes its JSON payload, so skip the first 3 characters
          data_object = JSON.parse(data.substring(3));
        } catch (e) {
          return;
        }
        var quote = {
          ticker: data_object[0].t,
          exchange: data_object[0].e,
          price: data_object[0].l_cur,
          change: data_object[0].c,
          change_percent: data_object[0].cp,
          last_trade_time: data_object[0].lt,
          dividend: data_object[0].div,
          yield: data_object[0].yld
        };
        p_socket.emit('quote', PRETTY_PRINT_JSON ? JSON.stringify(quote, null, '\t') : JSON.stringify(quote));
      }
    });
  });
}
My client-side code:
<script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jquery/1.7.2/jquery.js"></script>
<script type="text/javascript" src="http://localhost:4000/socket.io/socket.io.js"></script>
<script type="text/javascript">
  $(document).ready(function() {
    var socket = io.connect("http://localhost:4000");
    socket.on('quote', function(data) {
      // Use a distinct name: re-declaring "data" would shadow the handler argument
      var $quote = $("<pre>" + data + "</pre><hr />");
      $("#quotes").append($quote);
      $("html, body").animate({ scrollTop: $(document).height() }, 100);
      $quote.show("slide", { direction: "up" }, 250);
      $quote.effect("highlight", {}, 1500);
    });
  });
</script>
<body>
  <div id="quotes"></div>
</body>

I think that sending the desired IDs from the client side will make your application more flexible and easier to use. You can still write the server in a way that performs well.
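A rough sketch of the client-driven approach (untested; it reuses your get_quote and FETCH_INTERVAL, and the 'subscribe' event name is just illustrative):
// Client: tell the server which tickers this connection cares about
socket.emit('subscribe', ['AAPL', 'GOOG', 'MSFT']);

// Server: keep a per-socket watchlist instead of a hard-coded array
io.sockets.on('connection', function(socket) {
  var watchlist = [];
  socket.on('subscribe', function(ids) {
    watchlist = ids;
  });
  var timer = setInterval(function() {
    watchlist.forEach(function(ticker) {
      get_quote(socket, ticker);
    });
  }, FETCH_INTERVAL);
  socket.on('disconnect', function() {
    clearInterval(timer);
  });
});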
A long synchronous for loop ties up Node's event loop while it runs, and kicking off hundreds of HTTP requests in a single tick creates a burst of load. For async actions that need to iterate over an array I recommend:
https://github.com/caolan/async
Specifically 'async.each' (or 'async.eachLimit' if you want to cap how many requests are in flight at once).
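For example, a rough sketch with async.eachLimit (get_quote_cb here is a hypothetical variant of your get_quote that takes a completion callback):
var async = require('async');

// Fetch all tickers, but never more than 10 requests at a time
async.eachLimit(ids, 10, function(ticker, callback) {
  get_quote_cb(socket, ticker, callback); // must call callback(err) when done
}, function(err) {
  if (err) console.error('A quote fetch failed:', err);
});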
I haven't run your code, but my gut tells me that the browser would not enjoy that much DOM manipulation arriving all at once. Breaking the work into smaller groups should help. For instance, take your array of IDs, split it into five chunks, and stagger the start of each:
var arr1 = [...]
var arr2 = [...]
var arr3 = [...]
var arr4 = [...]
var arr5 = [...]

// Pass a function reference; calling doWorkOnArray(arr1) directly
// would run it immediately instead of after the delay
setTimeout(function() { doWorkOnArray(arr1); }, 4000);
setTimeout(function() { doWorkOnArray(arr2); }, 3000);
setTimeout(function() { doWorkOnArray(arr3); }, 2000);
setTimeout(function() { doWorkOnArray(arr4); }, 1000);
setTimeout(function() { doWorkOnArray(arr5); }, 0);

function doWorkOnArray(arr) {
  setInterval(function() { getData(arr); }, 5000);
}
Alternatively, you could set up a master/worker arrangement with something like Redis to queue the work; I think that would give the best performance. Check out:
https://github.com/Automattic/kue
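A bare-bones sketch of that shape (untested; assumes Redis running locally, and the job type and payload names are made up):
var kue = require('kue');
var queue = kue.createQueue(); // connects to Redis on localhost:6379 by default

// Producer: enqueue one job per batch of tickers
queue.create('fetch_quotes', { tickers: ['AAPL', 'GOOG'] }).save();

// Worker: pull jobs off the queue, up to 10 concurrently
queue.process('fetch_quotes', 10, function(job, done) {
  // fetch quotes for job.data.tickers here, emit the results, then ack
  done();
});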

Related

Socket.io online users list doesn't work properly in production

I'm trying to implement an online-users page using Socket.io in an Express web server. The code I'm using is listed below. When I use the website on the local network it works fine, but in production Socket.io behaves in an inexplicable way.
app.js
...
const app = express()
const http = require('http').createServer(app)
const io = require('socket.io')(http)
let activeUsers = []
...
io.on('connection', function(socket){
  let userName
  socket.on('user viewing', function(user){
    let inActiveUsers = false
    for(var x = 0; x < activeUsers.length; x++) {
      if(activeUsers[x].username == user) {
        inActiveUsers = true
        activeUsers[x].last_activity = MDate.getDateTime()
        break
      }
    }
    if(!inActiveUsers) {
      activeUsers.push({"username": user, "last_activity": MDate.getDateTime()})
    }
    userName = user
    io.emit('active users', activeUsers)
  })
  socket.on('disconnect', function(){
    let userIndex = -1
    for(var x = 0; x < activeUsers.length; x++) {
      if(activeUsers[x].username == userName) {
        userIndex = x
        break
      }
    }
    if(userIndex != -1) {
      activeUsers[userIndex].last_activity = 'NULL'
    }
    io.emit('active users', activeUsers)
  })
})
...
On every other page I have implemented the following client-side JavaScript:
<script>
  var socket = io();
  socket.emit('user viewing', '<%= user.username %>');
</script>
activeUsers.ejs
<script src="/socket.io/socket.io.js"></script>
<script>
  var socket = io();
  socket.on('active users', function(activeUsers){
    document.getElementById('users').innerHTML = '';
    for(var x = 0; x < activeUsers.length; x++) {
      if(activeUsers[x].last_activity != 'NULL') {
        $('#users').append('<tr><td>' + activeUsers[x].username + '</td><td>' + activeUsers[x].last_activity + '</td></tr>');
      }
    }
  });
</script>
On localhost the site shows all active users properly, but in production it shows only a few of the users who are online, even though others are online too. Sometimes it doesn't even show the account that is viewing the page. I'm using PM2 in production. What could be causing this?
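If PM2 is running the app in cluster mode, each worker process keeps its own in-memory activeUsers array and its own set of sockets, so clients attached to different workers never see each other. A hedged sketch of the usual mitigation is the socket.io-redis adapter (assumes Redis is available; the activeUsers array would likewise have to move into a shared store such as Redis):
const redisAdapter = require('socket.io-redis')

// Route broadcasts through Redis pub/sub so io.emit() reaches
// clients connected to any worker, not just this process
io.adapter(redisAdapter({ host: 'localhost', port: 6379 }))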

Socket.io fs.readFileSync for many connections at the same time

This is not really a question; I'd just like to know whether what I did is correct, because it's working!
So, on to it: I'm monitoring many interfaces (PPPoE clients) at the same time, reading their traffic statistics from Linux.
I'm using the npm packages express, socket.io and socket.io-stream.
Client:
var sessionsAccel = $('table.accel').DataTable([]);
sessionsAccel.on('preDraw', function() {
  $('.interfaceAccel').each(function(i) {
    var t = $(this).data();
    sockets['socket' + t.id].disconnect();
    delete speeds['tx_bytes' + t.id];
    delete speeds['rx_bytes' + t.id];
  });
})
.on('draw', function() {
  $('.interfaceAccel').each(function(i) {
    var t = $(this).data();
    sockets['socket' + t.id] = io.connect('http://172.16.101.2:3000/status', {
      query: 'interface=' + t.interface,
      'forceNew': true
    });
    sockets['socket' + t.id].on("connect", function() {
      ss(sockets['socket' + t.id]).on('sendStatus', function(stream, data) {
        if (typeof speeds['tx_bytes' + t.id] != 'undefined') {
          var speedtx = (data.tx_bytes - speeds['tx_bytes' + t.id]) * 8 / 1000;
          var speedrx = (data.rx_bytes - speeds['rx_bytes' + t.id]) * 8 / 1000;
          var speedtx_info, speedrx_info;
          if (speedtx > 1000) {
            speedtx = (speedtx / 1000).toFixed(2);
            speedtx_info = speedtx + ' Mbps';
          } else {
            speedtx = speedtx.toFixed(2);
            speedtx_info = speedtx + ' kbps';
          }
          if (speedrx > 1000) {
            speedrx = (speedrx / 1000).toFixed(2);
            speedrx_info = speedrx + ' Mbps';
          } else {
            speedrx = speedrx.toFixed(2);
            speedrx_info = speedrx + ' kbps';
          }
          $('.tx_' + t.id).html(speedtx_info);
          $('.rx_' + t.id).html(speedrx_info);
        }
        speeds['tx_bytes' + t.id] = data.tx_bytes;
        speeds['rx_bytes' + t.id] = data.rx_bytes;
      });
    });
  });
});
Server:
const app = require('express')();
const http = require('http').createServer(app);
const io = require('socket.io')(http);
const ss = require('socket.io-stream');
const path = require('path');
const fs = require('fs');

function getIntInfo(interface) {
  if (fs.existsSync('/sys/class/net/' + interface + '/statistics/tx_bytes')) {
    var tx_bytes = fs.readFileSync('/sys/class/net/' + interface + '/statistics/tx_bytes').toString();
    var rx_bytes = fs.readFileSync('/sys/class/net/' + interface + '/statistics/rx_bytes').toString();
    var tx_packets = fs.readFileSync('/sys/class/net/' + interface + '/statistics/tx_packets').toString();
    var rx_packets = fs.readFileSync('/sys/class/net/' + interface + '/statistics/rx_packets').toString();
    return {tx_bytes: tx_bytes, rx_bytes: rx_bytes, tx_packets: tx_packets, rx_packets: rx_packets};
  } else {
    return false;
  }
}

io.of('/status').on('connection', function(socket) {
  var query = socket.handshake.query['interface'];
  var timer = setInterval(function() {
    var stream = ss.createStream();
    var info = getIntInfo(query);
    ss(socket).emit('sendStatus', stream, info);
  }, 1000);
  socket.on('disconnect', function(){
    socket.disconnect(true);
    //console.info('disconnected user (id=' + socket.id + ').');
  });
})

http.listen(3000, function(){
  console.log('listening on *:3000');
});
That's it: every row of the DataTable (one per interface) opens a socket connection and retrieves the statistics.
My question is: will this mess up my server with all the I/O from reading these files?
Since you're doing this every second for every connected client, you should probably cache this data so it doesn't have to be read from disk or sent over the wire when it hasn't changed, to save both server load and bandwidth. But the details of how best to do that depend upon particulars of your application that you haven't included.
You can at least use asynchronous I/O, like this:
const util = require('util');
const fs = require('fs');
const readFile = util.promisify(fs.readFile);

function getIntInfo(interface) {
  function readInfo(name) {
    return readFile('/sys/class/net/' + interface + '/statistics/' + name).then(data => data.toString());
  }
  // Promise.all takes an array, and each entry uses the readInfo helper
  return Promise.all([
    readInfo('tx_bytes'),
    readInfo('rx_bytes'),
    readInfo('tx_packets'),
    readInfo('rx_packets')
  ]).then(([tx_bytes, rx_bytes, tx_packets, rx_packets]) => {
    return {tx_bytes, rx_bytes, tx_packets, rx_packets};
  }).catch(err => {
    console.log(err);
    return false;
  });
}
And, you have to stop the interval any time a client disconnects and change how it calls getIntInfo():
io.of('/status').on('connection', function(socket) {
  var query = socket.handshake.query['interface'];
  var timer = setInterval(function() {
    getIntInfo(query).then(info => {
      var stream = ss.createStream();
      ss(socket).emit('sendStatus', stream, info);
    });
  }, 1000);
  socket.on('disconnect', function(){
    // stop the timer for this connection
    clearInterval(timer);
  });
});
Now that I think about it a bit more, you could improve scalability quite a bit by having just one interval timer that reads the data and then sends that one set of data to all listening clients connected to the /status namespace. You would reduce the file reading from once per second per client to just once per second, no matter how many clients are connected.
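Sketching that idea (assuming a single shared interface name, here a hypothetical 'ppp0', and a plain emit rather than a per-client socket.io-stream):
var status = io.of('/status');

// One timer for the whole namespace: read once, broadcast to everyone
setInterval(function() {
  getIntInfo('ppp0').then(info => {
    status.emit('sendStatus', info); // each client gets the same payload
  });
}, 1000);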

TCP socket handling for multiplayer game on NodeJS

I have a multiplayer game where my server uses Node.js and a TCP socket (net.createServer) to communicate with the client.
I have thousands of clients connecting to the server, and many people complain that they are constantly being disconnected.
Here is how my server handles the connections now:
Init:
var net = require("net");
server = net.createServer(function(socket) {
  socket.setTimeout(15000);
  socket.setKeepAlive(true);
  socket.myBuffer = "";
  socket.msgsQue = [];
  socket.on("data", onData);
  socket.on("error", onError);
  socket.on("end", onClientDisconnect);
  socket.on("timeout", onClientDisconnect);
});
server.listen(port);
Sending to client:
var stringData = JSON.stringify({name: event, message: data});
if (!this.socket.msgsQue || typeof this.socket.msgsQue == "undefined")
  this.socket.msgsQue = [];
this.socket.msgsQue.unshift(stringData);
var i = this.socket.msgsQue.length;
while (i--) {
  if (this.socket.writable) {
    var elem = this.socket.msgsQue[i];
    this.socket.write(elem + "\0");
    this.socket.msgsQue.splice(i, 1);
  } else {
    // Unable to write at index "i";
    // will send the rest on the next attempt
    break;
  }
}
When disconnected:
var onClientDisconnect = function() {
  this.myBuffer = "";
  delete this.myBuffer;
  this.msgsQue = [];
  delete this.msgsQue;
  this.destroy();
}
Receiving from client:
var onData = function(data) {
  if (!data || !data.toString()) return;
  var raw = data.toString();
  this.myBuffer += raw;
  var d_index = this.myBuffer.indexOf('\0'); // find the delimiter
  // Keep going until no delimiter can be found
  var string;
  while (d_index > -1) {
    // Take the string up to the delimiter
    string = this.myBuffer.substring(0, d_index);
    // Cut off the processed chunk
    this.myBuffer = this.myBuffer.substring(d_index + 1);
    onMessage(string, this); // handle
    // Find the next delimiter
    d_index = this.myBuffer.indexOf('\0');
  }
}
A problem I've noticed is that msgsQue becomes huge when the socket is not writable, but the disconnect handler is not fired (or is fired much later).
Can you give me some advice on how to optimize this?
I've noticed that sometimes I get disconnected even though I can still ping the server, so it is definitely a server-related problem. Could it be because of high load on the server itself?
(I do not want to use socket.io: over the last year I had many problems with it, like memory leaks, freezing the server app, and no support.)
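One direction that might help with the growing msgsQue is to respect backpressure: socket.write() returns false once the internal buffer is full, and the socket emits 'drain' when it empties. A rough sketch (flushQueue is a hypothetical helper; as in the code above, unshift puts new messages at the front, so the oldest sits at the end):
function flushQueue(socket) {
  while (socket.msgsQue && socket.msgsQue.length > 0) {
    var msg = socket.msgsQue.pop(); // oldest message
    // write() still buffers the message, but a false return means the
    // internal buffer is full: stop and resume when 'drain' fires
    if (!socket.write(msg + "\0")) {
      socket.once('drain', function() { flushQueue(socket); });
      return;
    }
  }
}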

Event handling not working as expected in Node.js

I'm writing this little domain search app. It should sequentially search the .com of each item in an array, but it keeps returning the result for test1, even though a console.log inside the search function shows the value of x is test2 and then test3. Do I need to remove the listener or something?
I get the following output:
domain test1.com
Domain Name: TEST1.COM
domain test2.com
Domain Name: TEST1.COM
domain test3.com
Domain Name: TEST1.COM
app.js
var port = 43;
var net = require('net');
var host = 'whois.internic.net';
var dotCom = new net.Socket();
var c = 0;
var connections = 0;
var dotComStatus;
dotCom.setEncoding('ascii');

var searches = ['test1', 'test2', 'test3'];
search(searches.shift());

function chkconnections(z) {
  if (connections <= 0) {
    if (searches.length >= 1) {
      process.nextTick(function() {
        search(searches.shift());
      });
    }
  }
}

function search(x) {
  var q = "domain " + x + ".com\r\n";
  dotCom.connect(port, host, function() {
    dotCom.write(q);
    console.log(q);
    connections++;
  });
  dotCom.on('data', function(data) {
    c++;
    if (c == 2) {
      dotComStatus = data.split('\n')[1];
      dotCom.on('close', function() {
        console.log(dotComStatus);
        connections--;
        chkconnections();
      });
    }
  });
}
There are several obvious problems with this code. Firstly, registering the 'close' handler inside the 'data' handler is a bad idea: if the connection closed before data was received, that handler would never be attached. (Note too that each call to search() adds another 'data' listener to the same shared socket.)
Next, there is a big problem with this section:
c++;
if (c == 2)
Since you never reset c to 0, the line dotComStatus = data.split('\n')[1]; is never executed again after the first search. But the socket still closes each time, the 'close' event is triggered, and this is executed again:
console.log(dotComStatus);
connections--;
chkconnections();
But the value of dotComStatus has not changed since the first time c reached 2. There are many examples of how to do this connect/data/end flow that is common in Node.js:
var port = 43;
var net = require('net');
var host = 'whois.internic.net';
var searches = ['test1', 'test2', 'test3'];
search(searches.shift());

function chkconnections(z) {
  if (searches.length > 0)
    search(searches.shift());
}

function search(x) {
  var dotCom = new net.Socket();
  dotCom.setEncoding('ascii');
  var q = "domain " + x + ".com\r\n";
  dotCom.connect(port, host, function() {
    dotCom.write(q);
  });
  var data = ""; // holding place until socket closes
  dotCom.on('data', function(chunk) {
    data += chunk; // add chunk to data
  });
  dotCom.on("end", function() {
    // socket closed
    var dotComStatus = data.split('\n')[7]; // should be 'Domain Name: blah'
    console.log(dotComStatus);
    chkconnections(); // move on to the next search
  });
}

Increasing max_frame_size in amqp.js -- hitting limits in buffer copy

I have a situation where I have about 50 listeners on 50 'direct' exchanges. The client and the server are in JavaScript (Node.js), using node-amqp from postwait.
Things work fairly well at a low message frequency. Once the frequency climbs to ~5000 messages per minute, a buffer-copy error is thrown inside amqp.js.
From what I could trace, max_frame_size in amqp.js is fixed at 131072.
I tried doubling the value from 128k to 256k, but doing so causes node.js to fail silently without starting up; there is no error message. I am assuming that I also have to change the corresponding value (max_frame) in the rabbit.config file.
Do I have to do anything else to increase this value? Any other suggestions would also be appreciated.
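Something like this is what I have in mind for the RabbitMQ side, if I understand the frame_max setting in rabbitmq.config correctly (value in bytes; this is an assumption, not verified):
[
  {rabbit, [
    {frame_max, 262144}  %% should be >= the client's max_frame_size
  ]}
].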
I have attached minimal code to reproduce the error. Run the commands below in two windows:
node engine-so-client.js -c 200 -p 12000
node server-so.js
File server-so.js
var util = require('util');
var amqp = require('amqp');
var express = require('express');

function httpServer(exchange) {
  console.log("In httpServer start %s", exchange.name);
  var port;
  app = express.createServer();
  app.get('/message/:routingKey/:message', function(req, res) {
    exchange.publish(req.params.routingKey, {'d': req.params.message});
    res.send('Published the message ' + req.params.message + '\n');
  });
  app.get('/register/:socket/:routingKey', function(req, res) {
    var queue1 = conn.queue('',
      {autoDelete: false, durable: true, exclusive: true},
      function() {
        console.log("Queue1 Callback");
        queue1.subscribe(function(message) {
          console.log("subscribe Callback for " + req.params.routingKey + " " + message.d);
        });
        console.log("Queue Callback Binding with " + req.params.routingKey);
        queue1.bind(exchange.name, req.params.routingKey);
      });
    res.send('Started the socket at ' + req.params.socket + '\n');
  });
  app.listen(3000);
  app.use(express.logger());
  console.log('Started server on port %s', app.address().port);
}

function setup() {
  console.log("Setup");
  var exchange = conn.exchange('cf2-demo',
    {'type': 'direct', durable: false}, function() {
      var queue = conn.queue('',
        {autoDelete: false, durable: true, exclusive: true},
        function() {
          console.log("Queue Callback Binding with test key");
          queue.bind(exchange.name, 'testKey');
        });
      queue.on('queueBindOk', function() { httpServer(exchange); });
    });
  console.log("Completed setup %s", exchange.name);
}

var conn = amqp.createConnection({host: 'localhost',
                                  login: 'guest',
                                  password: 'guest'},
                                 {defaultExchangeName: "cf2-demo"});
conn.on('ready', setup);
File engine-so-client.js
var program = require('commander'); // the npm package is named "commander"
var util = require('util');
var http = require('http');

program
  .version('0.0.1')
  .option('-h, --host <host>', 'Host running server', String, 'localhost')
  .option('-p, --port <port>', 'Port to open to connect messages on', Number, 12000)
  .option('-k, --key <key>,', 'Routing Key to be used', String, 'key1')
  .option('-c, --count <count>', 'Iteration count', Number, 50)
  .option('-m, --mesg <mesg>', 'Message prefix', String, 'hello')
  .option('-t, --timeout', 'Timeout in ms between message posts')
  .parse(process.argv);

function setup(host, port, key, mesg) {
  var client = http.createClient(3000, host);
  var request = client.request('GET', '/register/' + port + "/" + key);
  request.end();
  request.on('response', function(response) {
    response.on('data', function(chunk) {
      postMessage(host, port, key, mesg, 1);
    });
  });
}

function postMessage(host, port, key, mesg, count) {
  var timeNow = new Date().getTime();
  var mesgNow = mesg + "-" + count + "-" + port;
  console.log("Type: Sent Mesg, Message: %s, Time: %s", mesgNow, timeNow);
  var client1 = http.createClient(3000, host);
  var request1 = client1.request('GET', '/message/' + key + "/" + mesgNow);
  request1.end();
  count++;
  if (count < 100) {
    setTimeout(function() { postMessage(host, port, key, mesg, count); }, 1000);
  }
}

var port = program.port;
var host = program.host;
var key = program.key;
var mesg = program.mesg;
var count = program.count;
var keys = ['key1', 'key2', 'key3', 'key4', 'key5'];
var messages = ['hello', 'world', 'good', 'morning', 'bye'];
var start = port;
for (var i = 0; i < count; i++) {
  var index = i % keys.length;
  var socket = start + i;
  setup(host, socket, keys[index], messages[index]);
}
Error attached:
buffer.js:494
throw new Error('sourceEnd out of bounds');
^
Error: sourceEnd out of bounds
at Buffer.copy (buffer.js:494:11)
at frame (/home/hvram/programs/node_modules/node-amqp/amqp.js:170:10)
at header (/home/hvram/programs/node_modules/node-amqp/amqp.js:160:14)
at frameEnd (/home/hvram/programs/node_modules/node-amqp/amqp.js:205:16)
at frame (/home/hvram/programs/node_modules/node-amqp/amqp.js:172:14)
at header (/home/hvram/programs/node_modules/node-amqp/amqp.js:160:14)
at frameEnd (/home/hvram/programs/node_modules/node-amqp/amqp.js:205:16)
at frame (/home/hvram/programs/node_modules/node-amqp/amqp.js:172:14)
at header (/home/hvram/programs/node_modules/node-amqp/amqp.js:160:14)
at frameEnd (/home/hvram/programs/node_modules/node-amqp/amqp.js:205:16)
