This is the code that I deployed on Heroku. Its function is to update a certain key that I'm displaying every ten seconds.
// Minimal Express server that serves a random "code"; a background interval
// refreshes the code every ten seconds.
const express = require('express');
const timestamp = require('time-stamp');

const app = express();
const port = 3000;

// Heroku injects the listening port via process.env.PORT; fall back to 3000 locally.
const server = app.listen(process.env.PORT || port, '0.0.0.0', function() {
  console.log(`Listening at port: ${port}`);
});

// Declare the shared code explicitly instead of creating an implicit global.
let code = 5;

app.get("/", encr);

// GET / handler: log the access and return the current code as JSON.
function encr(req, res) {
  console.log(`Code accessed, code = ${code}`);
  res.send({code});
}

// Replace the code with a fresh random integer in [1, 100] every 10 seconds.
// The callback awaits nothing, so it does not need to be async.
setInterval(function() {
  code = Math.floor((Math.random() * 100) + 1);
  console.log(`Key updated. Time: ${timestamp.utc('DD/mm:ss:ms')}`);
}, 10 * 1000);
This setInterval function needs to run every 10 seconds, but ends up running every 10.01 seconds, which after a bit begins to add up.
Logs:
2021-02-08T20:15:45.414054+00:00 app[web.1]: Key updated. Time: 08/15:45:413
2021-02-08T20:15:55.423763+00:00 app[web.1]: Key updated. Time: 08/15:55:423
[...]
2021-02-08T20:18:25.543898+00:00 app[web.1]: Key updated. Time: 08/18:25:543
2021-02-08T20:18:35.551720+00:00 app[web.1]: Key updated. Time: 08/18:35:551
2021-02-08T20:18:45.559856+00:00 app[web.1]: Key updated. Time: 08/18:45:559
How can I fix this so it's exactly 10 seconds?
Timeouts/intervals aren't entirely accurate at the millisecond timescale. Instead of trying to reassign code every precise interval, consider instead saving the timestamp whenever code gets updated, and when it gets accessed again, check whether the timestamp has expired - if so, reassign it:
// Lazy expiry: remember when the code was last assigned and regenerate it
// only when a request finds it older than ten seconds.
let lastAssignTimestamp;
let code;

function makeCode() {
  code = Math.floor(Math.random() * 100) + 1;
  lastAssignTimestamp = performance.now();
}

makeCode();

// Route handler: refresh the code if it has expired, then return it.
function encr(req, res) {
  const age = performance.now() - lastAssignTimestamp;
  if (age > 10_000) {
    // then the code has expired...
    makeCode();
  }
  console.log(`Code accessed, code = ${code}`);
  res.send({code});
}
I've edited my code to also print the last update's timestamp, that way I can just check directly in my other code without time keeping shenanigans
// Express server whose code expires after one minute. A one-second interval
// checks the code's age and regenerates it when stale; responses also carry
// the timestamp of the last update so clients can check freshness directly.
const express = require('express');

const app = express();
const port = 3000;

const server = app.listen(process.env.PORT || port, '0.0.0.0', function() {
  console.log(`Listening at port: ${port}`);
});

app.get("/", encr);

// Declare shared state explicitly instead of leaking implicit globals.
let code = 5;
let last_time = 0;
let lastAssignTimestamp;

// Assign a fresh random code in [1, 100] and remember when it happened.
const makeCode = () => {
  code = Math.floor((Math.random() * 100) + 1);
  lastAssignTimestamp = +new Date();
  console.log(`New Code: ${code}`);
};

makeCode();

// Once per second: regenerate the code if it is older than a minute, and
// publish the last-update timestamp for the handler below.
setInterval(function() {
  const now = +new Date();
  if (now - lastAssignTimestamp > 1000 * 60) {
    // then the code has expired... every minute
    makeCode();
  }
  last_time = lastAssignTimestamp;
  console.log(`Time left till next update: ${Math.round(((1000 * 60) - (now - lastAssignTimestamp)) / 1000)}`);
}, 1000);

// Return the current code together with the time it was last updated.
function encr(req, res) {
  res.send(
    {code, last_time}
  );
}
Related
I'm new to socket.io and I'm learning node.js as well. I tried to import a module in node.js and socket.io, but it doesn't work. The server stops working.
app.js
// Express + socket.io bootstrap: starts the countdown whenever a client connects.
const express = require('express');
const app = express();
const http = require('http');
const server = http.createServer(app);
const counter = require('./counter');
var io = require('socket.io')(server,{});
server.listen(3000, () => {
console.log('listening on *:3000');
});
io.on('connection', function(socket)
{
// NOTE(review): counter() is called with no arguments here, but counter.js
// references `io` internally — the server crashes because counter has no
// access to the socket.io instance. Pass it in: counter(io).
counter();
});
counter.js
// Emits a 20-second countdown over socket.io, one tick per second, then a
// final "Time is over" event.
const counter = () => {
let time = 20;
let countdown = setInterval(update, 1000);
function update() {
let sec = time % 60;
// NOTE(review): `io` is never defined in this module — this unresolved name
// is why the server stops working; the socket.io instance must be passed in
// (or otherwise imported) for these emits to work.
io.sockets.emit(`${sec}`);
time--;
if (sec == 0) {
io.sockets.emit('Time is over');
clearInterval(countdown);
}
}
}
module.exports = counter
counter() works outside the io.on handler, but I need it to work inside. Is there any solution for this? I'm not sure what I should import in counter.js so that the socket works.
Well I would imagine your main issue is that 'counter' has no idea what 'io' is. Try passing in io to counter.
// Countdown from 20 seconds: once per second emit the remaining seconds on
// the socket.io instance passed in; when it reaches zero, emit a closing
// message and stop the timer.
const counter = (io) => { //<--- add io
  let remaining = 20;
  const tick = () => {
    const sec = remaining % 60;
    io.sockets.emit(`${sec}`);
    remaining--;
    if (sec == 0) {
      io.sockets.emit('Time is over');
      clearInterval(timer);
    }
  };
  const timer = setInterval(tick, 1000);
};
This adds an argument to your counter function that we can access inside the function. When you call your 'counter' function- we'll pass the io object to it so that the function has access to the object like so:
// On each new client connection, hand the socket.io instance to counter so
// its emits have something to emit on.
io.on('connection', (socket) => {
  counter(io); // <--- add io
});
You can check out a working example of what you would like to accomplish here:
https://replit.com/#brandonetter/OrangeredShamelessVendor#index.js
I have a node.js script that creates a websocket connection to a crypto trading site (bitmex). It streams price data.
For some reason, after an hour or two the stream goes bad and the prices (if streamed at all) are inaccurate.
For now I restart it manually every hour but I need that to be done automatically. How can I do that?
Here is the code of the script:
// Streams live BitMEX trade prices over a websocket and upserts the latest
// price per symbol into a local Couchbase bucket.
var WebSocket = require('ws');
var ws = new WebSocket("wss://www.bitmex.com/realtime");
var couchbase = require('couchbase')
var cluster = new couchbase.Cluster('couchbase://localhost/');
cluster.authenticate('xxxxxx', 'xxxxxxx');
var bucket = cluster.openBucket('test');
var N1qlQuery = couchbase.N1qlQuery;
// Count of messages received so far; used to skip the stream's opening burst.
let num_msg = 0;
ws.onopen = function(){
// Subscribe to live trades for the two instruments of interest.
ws.send(JSON.stringify({"op": "subscribe", "args": [
"trade:XBTUSD",
"trade:LTCZ18"]
}))
};
ws.onmessage = function(msg){
var response = JSON.parse(msg.data);
num_msg++
// Ignore the first 50 messages — presumably the initial snapshot/partial
// frames rather than live trades; TODO confirm against the BitMEX docs.
if(num_msg > 50) {
var coin = response['data'][0]['symbol'];
var price = response['data'][0]['price'];
//console.log(coin + ":" + price + "\n");
// NOTE(review): createPrimaryIndex is invoked on every message; creating the
// index once at startup would avoid a round trip per trade. Also note the
// upsert callback below ignores err, so write failures are silent.
bucket.manager().createPrimaryIndex(
function(){
bucket.upsert( coin,
{
'price': price
},
function (err, result){
});
});
}
};
EDIT: I forgot to mention that I currently use a Windows 7 system (even though I do need to move to Ubuntu or similar).
EDIT 2: Final version of my code :)
// Restart the BitMEX websocket + Couchbase pipeline twice an hour: cron fires
// at two minute marks 30 minutes apart, starting one minute from now.
const cron = require("node-cron");
var WebSocket = require('ws');
var couchbase = require('couchbase');
var dateTime = require('node-datetime');

let now = new Date();
// Start at the next minute; wrap with % 60 so 59 + 1 does not yield the
// invalid cron minute 60.
let minutes = (now.getMinutes() + 1) % 60;
// Second run is 30 minutes later, wrapped into 0-59.
// (Bug fix: the original assigned `minutes - 30` in BOTH branches of its
// if/else, so the second slot was wrong whenever no wrap was needed.)
let minutes1 = (minutes + 30) % 60;

// Build the cron expression with the two minute marks in ascending order.
let s_cron;
if (minutes < minutes1) {
  s_cron = minutes + "," + minutes1 + " * * * *";
} else {
  s_cron = minutes1 + "," + minutes + " * * * *";
}

cron.schedule(s_cron, () => {
  console.log("---------------------");
  console.log("Running Cron Job");
  var dt = dateTime.create();
  var formatted = dt.format('Y-m-d H:M:S');
  console.log(formatted);
  // create bitmex ws
  var ws = new WebSocket("wss://www.bitmex.com/realtime");
  // connect to couchbase
  var cluster = new couchbase.Cluster('couchbase://localhost/');
  cluster.authenticate('xxxxxxxx', 'xxxxxxxxxx');
  var bucket = cluster.openBucket('test');
  var N1qlQuery = couchbase.N1qlQuery;
  let num_msg = 0;
  ws.onopen = function() {
    ws.send(JSON.stringify({"op": "subscribe", "args": [
      "trade:XBTUSD",
      "trade:LTCZ18"]
    }));
  };
  ws.onmessage = function(msg) {
    var response = JSON.parse(msg.data);
    num_msg++;
    // Skip the stream's opening burst before recording prices.
    if (num_msg > 50) {
      var coin = response['data'][0]['symbol'];
      var price = response['data'][0]['price'];
      //console.log(coin + ":" + price + "\n");
      bucket.manager().createPrimaryIndex(
        function() {
          bucket.upsert( coin,
            {
              'price': price
            },
            function (err, result) {
              //bucket.disconnect()
            });
        });
    }
  };
});
Try 'node-cron': more at https://www.npmjs.com/package/node-cron Hope that works.
Consider using cron to restart every hour. Your crontab entry would look like:
0 * * * * <command to restart your app>
If you can't or don't want to use your system crontab or equivalent (not sure what it would be on Windows), you can use pm2.
pm2: https://www.npmjs.com/package/pm2
For how to make pm2 restart every hour, see https://github.com/Unitech/pm2/issues/1076 or https://stackoverflow.com/a/38062307/436641.
Another option would be node-cron: https://www.npmjs.com/package/node-cron
This is not really a question, but I would like to know whether what I did is correct, because it's working!
So, let's get to the question: I'm monitoring many interfaces (PPPoE clients) at the same time to track their traffic, reading the statistics from Linux.
I`m using npm packages: express, socket.io and socket.io-stream.
Client:
// DataTables client: each visible interface row holds its own socket.io
// connection streaming byte counters; speeds are derived from the delta
// between successive samples. Sockets are torn down before a redraw and
// rebuilt afterwards so the set of connections tracks the visible rows.
var sessionsAccel = $('table.accel').DataTable([]);
sessionsAccel.on('preDraw', function() {
$('.interfaceAccel').each(function(i) {
var t = $(this).data();
// Drop this row's socket and its cached counters before the table redraws.
sockets['socket' + t.id].disconnect();
delete speeds['tx_bytes' + t.id];
delete speeds['rx_bytes' + t.id];
});
})
.on('draw', function() {
$('.interfaceAccel').each(function(i) {
var t = $(this).data();
// One namespaced connection per row; the interface name travels in the query.
sockets['socket' + t.id] = io.connect('http://172.16.101.2:3000/status', {
query: 'interface=' + t.interface,
'forceNew': true
});
sockets['socket' + t.id].on("connect", function() {
ss(sockets['socket' + t.id]).on('sendStatus', function(stream, data) {
// Only compute a speed once a previous sample exists to diff against.
if (typeof speeds['tx_bytes' + t.id] != 'undefined') {
// bytes delta * 8 / 1000 => kbps (samples arrive roughly once per second).
var speedtx = (data.tx_bytes - speeds['tx_bytes' + t.id]) * 8 / 1000;
var speedrx = (data.rx_bytes - speeds['rx_bytes' + t.id]) * 8 / 1000;
if (speedtx > 1000) {
speedtx = speedtx / 1000;
speedtx = speedtx.toFixed(2);
speedtx_info = speedtx + ' Mbps';
} else {
speedtx = speedtx.toFixed(2);
speedtx_info = speedtx + ' kbps';
}
if (speedrx > 1000) {
speedrx = speedrx / 1000;
speedrx = speedrx.toFixed(2);
speedrx_info = speedrx + ' Mbps';
} else {
speedrx = speedrx.toFixed(2);
speedrx_info = speedrx + ' kbps';
}
$('.tx_' + t.id).html(speedtx_info);
$('.rx_' + t.id).html(speedrx_info);
}
// Remember this sample for the next delta.
speeds['tx_bytes' + t.id] = data.tx_bytes;
speeds['rx_bytes' + t.id] = data.rx_bytes;
});
});
});
})
Server:
// socket.io server: for each client connected to /status, read the requested
// interface's sysfs statistics once per second and stream them back.
const app = require('express')();
const http = require('http').createServer(app);
const io = require('socket.io')(http);
const ss = require('socket.io-stream');
const path = require('path');
const fs = require('fs');
// Read the four Linux sysfs counters for one interface; returns an object of
// stringified counters, or false when the interface does not exist.
// NOTE(review): these are synchronous reads executed once per second per
// connected client — they block the event loop while reading.
function getIntInfo(interface) {
if(fs.existsSync('/sys/class/net/'+ interface +'/statistics/tx_bytes')) {
var tx_bytes = fs.readFileSync('/sys/class/net/'+ interface +'/statistics/tx_bytes').toString();
var rx_bytes = fs.readFileSync('/sys/class/net/'+ interface +'/statistics/rx_bytes').toString();
var tx_packets = fs.readFileSync('/sys/class/net/'+ interface +'/statistics/tx_packets').toString();
var rx_packets = fs.readFileSync('/sys/class/net/'+ interface +'/statistics/rx_packets').toString();
return {tx_bytes : tx_bytes, rx_bytes : rx_bytes, tx_packets: tx_packets, rx_packets: rx_packets};
}else
return false;
}
io.of('/status').on('connection', function(socket) {
// The interface name arrives in the connection's handshake query.
var query = socket.handshake.query['interface'];
var timer = setInterval(function() {
var stream = ss.createStream();
var info = getIntInfo(query);
ss(socket).emit('sendStatus', stream, info);
}, 1000);
socket.on('disconnect', function(){
// NOTE(review): `timer` is never cleared here, so the interval keeps firing
// (and reading files) after the client leaves; calling socket.disconnect()
// inside the disconnect handler is redundant. clearInterval(timer) is needed.
socket.disconnect(true);
//console.info('disconnected user (id=' + socket.id + ').');
});
})
http.listen(3000, function(){
console.log('listening on *:3000');
});
That's it: every row of the DataTable (each of which represents an interface) opens a socket connection and retrieves the statistics.
My question is, this will mess up my server with many I/O reading these files?
Since you're doing this every second for every connected client, it seems like you should probably cache this data so it doesn't have to be read from the disk or sent over the wire when it hasn't changed to save both server load and bandwidth usage. But, the details of how to best do that depend upon knowledge about your particular application that you haven't included.
You can at least use asynchronous I/O like this:
const util = require('util');
const fs = require('fs');
const readFile = util.promisify(fs.readFile);
// Asynchronously read the four Linux sysfs statistics counters for one
// interface. Resolves to an object of stringified counters, or false when
// any read fails (e.g. the interface does not exist).
// Note: the parameter is renamed from `interface`, a reserved word in strict
// mode / ES modules; callers pass positionally so this is non-breaking.
function getIntInfo(iface) {
  // Read a single counter file and decode it to a string.
  function readInfo(name) {
    return readFile('/sys/class/net/'+ iface +'/statistics/' + name).then(data => data.toString());
  }
  // Bug fixes vs. the original: Promise.all takes ONE array argument (not four
  // separate promises), and the helpers must call readInfo (which builds the
  // sysfs path), not readFile directly with a bare counter name.
  return Promise.all([
    readInfo('tx_bytes'),
    readInfo('rx_bytes'),
    readInfo('tx_packets'),
    readInfo('rx_packets')
  ]).then(([tx_bytes, rx_bytes, tx_packets, rx_packets]) => {
    return {tx_bytes, rx_bytes, tx_packets, rx_packets};
  }).catch(err => {
    // Missing interface (ENOENT) or any read error resolves to false,
    // matching the synchronous original's contract.
    console.log(err);
    return false;
  });
}
And, you have to stop the interval any time a client disconnects and change how it calls getIntInfo():
// For each client on the /status namespace: sample the requested interface's
// statistics once per second and stream them back, and tear the timer down
// as soon as the client disconnects.
io.of('/status').on('connection', (socket) => {
  const ifaceName = socket.handshake.query['interface'];
  const timer = setInterval(() => {
    getIntInfo(ifaceName).then((stats) => {
      const stream = ss.createStream();
      ss(socket).emit('sendStatus', stream, stats);
    });
  }, 1000);
  // stop the timer for this connection
  socket.on('disconnect', () => clearInterval(timer));
});
Now that I think about it a bit more, you could improve scalability quite a bit by having just one interval timer that was reading the data and then sending that one set of data to all listening clients that had connected to the /status namespace. You would reduce the file reading from once per second for every client to just once per second for no matter how many clients.
I started with this project for my real time stock price update project.
This project works well when I am working with one or two stocks, but not when I want to update the price of hundreds of stocks at the same time. I'd like to know if I'm doing this the right way. Right now I fetch the data for all stocks in a for loop on the server, but the price update is very very slow. I'd like to know how to improve this.
I'd like to know how to update hundreds of stock prices each second, without affecting server performance.
I don't know if I should be sending the server a list of stocks I need from the client like: var ids = [ '', '', '', ... ], or if I can run those ids from the server itself.
Which is best: Stocks request from client to server, or from server to client?
Note: I will be using a different url to get stock price.
My server side code :
////
// CONFIGURATION SETTINGS
///
var PORT = 4000;
var FETCH_INTERVAL = 5000;
var PRETTY_PRINT_JSON = true;
///
// START OF APPLICATION
///
var express = require('express');
var http = require('http');
var io = require('socket.io');
var app = express();
var server = http.createServer(app);
var io = io.listen(server);
io.set('log level', 1);
server.listen(PORT);
// Scratch variable used to hand the requested ticker from the HTTP route to
// the socket connection that follows it.
// NOTE(review): this module-level handoff is racy — two browsers loading
// pages at the same time can pick up each other's ticker.
var ticker = "";
app.get('/:ticker', function(req, res) {
ticker = req.params.ticker;
res.sendfile(__dirname + '/index.html');
});
io.sockets.on('connection', function(socket) {
// Capture and clear the ticker left behind by the page request above.
var local_ticker = ticker;
ticker = "";
//Run the first time immediately
get_quote(socket, local_ticker);
//Every N seconds
// NOTE(review): this fires ~100 HTTP fetches per tick PER CONNECTED CLIENT;
// fetching once centrally and broadcasting would scale far better.
var timer = setInterval(function() {
var ids = ['AAPL', '' , ..........100 stocks];
var l = ids.length;
for(var i=0; i<l; i++){
get_quote(socket, ids[i])
}
}, FETCH_INTERVAL);
socket.on('disconnect', function () {
clearInterval(timer);
});
});
// Fetch one quote from the (legacy) Google Finance endpoint and emit it to
// the given socket as a 'quote' event.
function get_quote(p_socket, p_ticker) {
http.get({
host: 'www.google.com',
port: 80,
path: '/finance/info?client=ig&q=' + p_ticker
}, function(response) {
response.setEncoding('utf8');
var data = "";
response.on('data', function(chunk) {
data += chunk;
});
response.on('end', function() {
if(data.length > 0) {
try {
// The endpoint prefixes its JSON with "// " — strip 3 chars before parsing.
var data_object = JSON.parse(data.substring(3));
} catch(e) {
// Unparseable payload: silently drop this quote.
return;
}
var quote = {};
quote.ticker = data_object[0].t;
quote.exchange = data_object[0].e;
quote.price = data_object[0].l_cur;
quote.change = data_object[0].c;
quote.change_percent = data_object[0].cp;
quote.last_trade_time = data_object[0].lt;
quote.dividend = data_object[0].div;
quote.yield = data_object[0].yld;
p_socket.emit('quote', PRETTY_PRINT_JSON ? JSON.stringify(quote, true, '\t') : JSON.stringify(quote));
}
});
});
}
My client side code :
<!-- Quote display page: connects to the socket.io server and appends each
     incoming quote to the page with a scroll/slide/highlight animation. -->
<script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jquery/1.7.2/jquery.js"></script>
<script type="text/javascript" src="http://localhost:4000/socket.io/socket.io.js"></script>
<script type="text/javascript">
$(document).ready(function() {
var socket = io.connect("http://localhost:4000");
socket.on('quote', function(data) {
// NOTE(review): `var data` shadows the incoming payload, and the payload is
// interpolated into HTML unescaped — fine for trusted server data only.
var data = $("<pre>" + data + "</pre><hr />");
$("#quotes").append(data);
// Scroll to the bottom so the newest quote is visible, then animate it in.
$("html, body").animate({ scrollTop: $(document).height() }, 100);
$(data).show("slide", { direction: "up" }, 250);
$(data).effect("highlight", {}, 1500);
});
});
</script>
<body>
<div id="quotes"></div>
</body>
I think that sending the desired ID's from the client side will make your application more flexible and easy to use. You can still write your server in a way that will be performant.
'For loops' will block Node's event loop. For async actions that need to iterate over an array I recommend:
https://github.com/caolan/async
Specifically 'async.each'
I haven't run your code but my gut tells me that my browser would not enjoy that much DOM manipulation all at once. I think that breaking the groups into smaller pieces would help. For instance:
Take your array of ID's and break it into 5. Then stagger the intervals of each.
// Split the full ticker list into five batches and stagger when each batch
// starts polling, so the DOM is not hit with updates for every stock at once.
var arr1 = [/* first fifth of the ids */];
var arr2 = [/* second fifth */];
var arr3 = [/* third fifth */];
var arr4 = [/* fourth fifth */];
var arr5 = [/* fifth fifth */];

// Bug fix: pass a FUNCTION to setTimeout. The original wrote
// setTimeout(doWorkOnArray(arr1), 4000), which calls doWorkOnArray
// immediately and schedules its (undefined) return value instead.
setTimeout(function() { doWorkOnArray(arr1); }, 4000);
setTimeout(function() { doWorkOnArray(arr2); }, 3000);
setTimeout(function() { doWorkOnArray(arr3); }, 2000);
setTimeout(function() { doWorkOnArray(arr4); }, 1000);
setTimeout(function() { doWorkOnArray(arr5); }, 0);

// Poll one batch every 5 seconds. Same fix applies: wrap getData in a
// function rather than invoking it while scheduling.
function doWorkOnArray(arr) {
  setInterval(function() { getData(arr); }, 5000);
}
Alternatively you could look at setting up a Master/Worker with something like Redis to queue the work. I think this would be the best performance. Check out:
https://github.com/Automattic/kue
I have a situation where I have about 50 listeners on 50 'direct' exchanges. The client and the server are in javascript (node.js) . It is using the node-amqp from postwait .
Things work fairly well at low frequency of messages. Once the message frequency increases ~ 5000 messages per minute then there is a buffer copy error being shown in amqp.js
From what I could trace the max_frame_size in amqp.js is fixed to 131072 .
I just tried to double the value from 128k to 256k . But doing so causes the node.js to silently fail without starting up. There is no error message. I am assuming that I also have to change the corresponding value (max_frame) in the rabbit.config file.
Do I have to do anything else to increase this value . Any other suggestions will also be appreciated.
I have attached the minimal code to simulate the error . Run the commands below in 2 windows to simulate the error
node engine-so-client.js -c 200 -p 12000
node server-so.js
File server-so.js
// AMQP demo server: declares a direct exchange on RabbitMQ, then exposes a
// tiny HTTP API to publish messages and to register subscriber queues bound
// to arbitrary routing keys.
var util= require('util')
var amqp = require('amqp');
var express = require ('express')
// Bring up the HTTP front-end once the exchange is ready.
function httpServer(exchange) {
console.log("In httpServer start %s",exchange.name);
var port;
// NOTE(review): `app` is assigned without var/let, creating an implicit
// global; express.createServer() is the legacy Express 2.x API.
app = express.createServer();
// Publish :message on the exchange using :routingKey.
app.get('/message/:routingKey/:message',function(req,res) {
exchange.publish(req.params.routingKey,{'d' : req.params.message});
res.send('Published the message '+req.params.message+'\n');
});
// Create an anonymous queue, subscribe it, and bind it to :routingKey.
app.get('/register/:socket/:routingKey',function(req,res) {
var queue1 = conn.queue('',
{autoDelete: false, durable: true, exclusive: true},
function() {
console.log("Queue1 Callback");
queue1.subscribe(
function(message) {
console.log("subscribe Callback for "+req.params.routingKey + " " + message.d);
});
console.log("Queue Callback Binding with "+req.params.routingKey);
queue1.bind(exchange.name,req.params.routingKey);
});
res.send('Started the socket at '+req.params.socket+'\n');
});
app.listen(3000);
// NOTE(review): middleware registered after listen(); the logger should be
// installed before routes/listen to capture requests.
app.use(express.logger());
console.log('Started server on port %s', app.address().port);
}
// Declare the demo exchange plus a bootstrap queue bound to 'testKey', then
// start the HTTP server once the binding is acknowledged.
function setup() {
console.log("Setup");
var exchange = conn.exchange('cf2-demo',
{'type': 'direct', durable: false}, function() {
var queue = conn.queue('',
{autoDelete: false, durable: true, exclusive: true},
function() {
console.log("Queue Callback Binding with test key");
queue.bind(exchange.name,'testKey');
});
queue.on('queueBindOk',
function() { httpServer(exchange); });
});
console.log("Completed setup %s", exchange.name);
}
var conn = amqp.createConnection({host:'localhost',
login:'guest',
password:'guest'},
{defaultExchangeName: "cf2-demo"});
conn.on('ready',setup);
File engine-so-client.js
// Load generator: registers `count` subscribers against the demo server and
// then posts a message per subscriber once a second (up to 100 each).
// NOTE(review): the package is normally required as 'commander', not
// 'commander.js' — verify this resolves in your setup.
var program = require('commander.js');
var util = require('util');
var http = require('http');
program
.version('0.0.1')
.option('-h, --host <host>', 'Host running server', String,'localhost')
.option('-p, --port <port>', 'Port to open to connect messages on',Number,12000)
.option('-k, --key <key>,', 'Routing Key to be used',String,'key1')
.option('-c, --count <count>','Iteration count',Number,50)
.option('-m, --mesg <mesg>','Message prefix',String,'hello')
.option('-t, --timeout', 'Timeout in ms between message posts')
.parse(process.argv);
// Register one subscriber, then kick off posting once the server responds.
// NOTE(review): http.createClient is a long-deprecated API (removed in
// modern Node); http.get/http.request is the current equivalent.
function setup(host,port,key,mesg) {
var client = http.createClient(3000, host);
var request = client.request('GET','/register/'+port+"/"+key);
request.end();
request.on('response', function(response) {
response.on('data', function(chunk) {
postMessage(host,port,key,mesg,1);
});
});
}
// Post one message, then reschedule itself every second until count hits 100.
function postMessage(host,port,key,mesg,count) {
var timeNow = new Date().getTime();
var mesgNow = mesg+"-"+count+"-"+port;
console.log("Type: Sent Mesg, Message: %s, Time: %s",mesgNow,timeNow);
var client1 = http.createClient(3000, host);
var request1 = client1.request('GET','/message/'+key+"/"+mesgNow);
request1.end();
count++;
if (count <100) {
setTimeout( function() { postMessage(host,port,key,mesg,count); }, 1000 );
}
}
var port = program.port;
var host = program.host;
var key = program.key;
var mesg = program.mesg;
var count = program.count;
var keys = ['key1','key2','key3','key4','key5'];
var messages = ['hello','world','good','morning','bye'];
var start=port;
// Spread the registrations across the five keys/messages round-robin.
// NOTE(review): loop variable `i` is an implicit global (no var/let).
for (i=0; i<count; i++) {
var index = i%keys.length;
var socket = start + i;
setup(host,socket,keys[index],messages[index]);
}
Error attached
buffer.js:494
throw new Error('sourceEnd out of bounds');
^
Error: sourceEnd out of bounds
at Buffer.copy (buffer.js:494:11)
at frame (/home/hvram/programs/node_modules/node-amqp/amqp.js:170:10)
at header (/home/hvram/programs/node_modules/node-amqp/amqp.js:160:14)
at frameEnd (/home/hvram/programs/node_modules/node-amqp/amqp.js:205:16)
at frame (/home/hvram/programs/node_modules/node-amqp/amqp.js:172:14)
at header (/home/hvram/programs/node_modules/node-amqp/amqp.js:160:14)
at frameEnd (/home/hvram/programs/node_modules/node-amqp/amqp.js:205:16)
at frame (/home/hvram/programs/node_modules/node-amqp/amqp.js:172:14)
at header (/home/hvram/programs/node_modules/node-amqp/amqp.js:160:14)
at frameEnd (/home/hvram/programs/node_modules/node-amqp/amqp.js:205:16)