Data not sending over socket in loop - node.js

I'm trying to send sensor information from one instance of Node.js to another (eventually on another server). When the while loop in the client code below is enabled, no connection is made and no data is sent to the server. However, when the while loop is commented out, I get a connection and data. It's only a single object of data, but that's expected when not pulling the sensor data in a loop.
Server Code:
'use strict';
const net = require('net');
const PORT = 5000;
const ADDRESS = '127.0.0.1';
let server = net.createServer(onClientConnected);
server.listen(PORT, ADDRESS);
function onClientConnected(socket) {
  console.log(`New client: ${socket.remoteAddress}:${socket.remotePort}`);
  socket.destroy();
}
console.log(`Server started at: ${ADDRESS}:${PORT}`);
function onClientConnected(socket) {
  let clientName = `${socket.remoteAddress}:${socket.remotePort}`;
  console.log(`${clientName} connected.`);
  socket.on('data', (data) => {
    let m = data.toString().replace(/[\n\r]*$/, '');
    var d = {msg:{info:m}};
    console.log(`${clientName} said: ${data.toString()}`);
    socket.write(`We got your message (${m}). Thanks!\n`);
  });
  socket.on('end', () => {
    console.log(`${clientName} disconnected.`);
  });
}
Client Code:
var n = require('net');
var s = n.Socket();
s.connect(5000, 'localhost');
var i2c = require('i2c-bus');
var MPU6050 = require('i2c-mpu6050');
var address = 0x68;
var i2cl = i2c.openSync(1);
var sensor = new MPU6050(i2cl, address);
//while (true) {
var data = sensor.readSync();
console.log(data);
s.write(data.toString());
//}

Node's main event loop is single-threaded, so only one piece of JavaScript runs at any one time, and control doesn't return to the event loop until the currently running code finishes. socket.write doesn't actually push the bytes onto the wire when it's called; it queues the data, and the real write happens later from the event loop. The same goes for s.connect, which completes asynchronously. Your while (true) loop never finishes, so it blocks the event loop: the connection never finishes being set up and the queued writes are never flushed.
Replacing your client code with this should work and achieve effectively what you're trying to do:
var n = require('net');
var s = n.Socket();
s.connect(5000, 'localhost');
var i2c = require('i2c-bus');
var MPU6050 = require('i2c-mpu6050');
var address = 0x68;
var i2cl = i2c.openSync(1);
var sensor = new MPU6050(i2cl, address);
const readData = () => {
  var data = sensor.readSync();
  console.log(data);
  s.write(data.toString());
  // queue the next read behind any pending I/O events instead of blocking the event loop
  setImmediate(readData);
};
readData();
setImmediate schedules the function to run only after pending I/O events have been processed, so the queued socket writes get a chance to go out between sensor reads. socket.write also accepts a callback that is invoked once the data has been flushed, which you can use to pace the reads on the writes themselves.
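For example, here is a minimal sketch of that callback-based variant (assuming the same i2c-mpu6050 setup as above, and that newline-delimited JSON is an acceptable wire format, since calling toString() on the reading object would typically just produce "[object Object]"):

var net = require('net');
var i2c = require('i2c-bus');
var MPU6050 = require('i2c-mpu6050');

var s = new net.Socket();
var sensor = new MPU6050(i2c.openSync(1), 0x68);

function sendReading() {
  var data = sensor.readSync();
  // serialize the reading; data.toString() would only yield "[object Object]"
  s.write(JSON.stringify(data) + '\n', function () {
    // schedule the next read only once this chunk has been handed off
    sendReading();
  });
}

s.connect(5000, 'localhost', sendReading); // start reading once connected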

Related

Socket IO not sending new sql data after client connects

I am creating a real-time app using Socket.IO, Node.js, Express.js, React for the frontend, and Microsoft SQL for the database. I only want to send data when the database is updated or when a new client connects. When a client first connects, the IO connection fires and sends my data to the new client, but when I make a change to my database, the data never gets sent. My code is below. I feel as though I am close, but I am just missing something that makes the code work. I appreciate any kind of help.
const express = require('express');
const app = express();
const httpServer = require('http').createServer(app);
const io = require('socket.io')(httpServer);
const path = __dirname + '/views/';
let sqlQuery = require('./controllers/sqlController').queryDatabase;
let currentQueryData = {};
let connectedSocketID;
let objectMatched = true;
app.use(express.static(path));
app.get('/', function (req, res) {
  res.sendFile(path + "index.html");
});
// Function to emit data only when a change is found.
const sendData = (data, socket) => {
  socket.emit('markerCreation', data);
}
// Compare both objects and return a boolean value.
const compareObjects = (object1, object2) => {
  return JSON.stringify(object1) === JSON.stringify(object2);
}
httpServer.listen(3001, () => {
  console.log(`Server listening at ${3001}`);
})
io.on('connection', async socket => {
  // Get new query data, then compare the object with the currently saved query data
  let newQueryData = await sqlQuery();
  objectMatched = compareObjects(currentQueryData, newQueryData)
  if (!objectMatched) { // If the objects don't match, save the new data in currentQueryData and send it to the client.
    currentQueryData = newQueryData;
    sendData(currentQueryData, socket);
  } else if (connectedSocketID !== socket.id) { // If this socket isn't already tracked, save its id and send the current data to the client
    connectedSocketID = socket.id;
    sendData(currentQueryData, socket);
  };
  // Issue: Socket.IO stops sending to the connected client. If a new update happens on the SQL database the change isn't passed along to
  // the client.
});
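One common way to get the behaviour described above, sketched here only under the assumption that polling the database is acceptable and that sqlQuery, compareObjects, currentQueryData and io keep the meanings they have in the snippet, is to run the comparison on a server-side interval and emit to all connected clients rather than only inside the connection handler:

// Hypothetical polling loop: re-query the database every few seconds and
// broadcast only when the snapshot has actually changed.
setInterval(async () => {
  const newQueryData = await sqlQuery();
  if (!compareObjects(currentQueryData, newQueryData)) {
    currentQueryData = newQueryData;
    io.emit('markerCreation', currentQueryData); // push the update to every connected client
  }
}, 5000);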

Node.js Net Sockets: Can't connect to multiple IP's

I want to read out data from 2 different IP addresses, but I always get an "EALREADY" error, which means the socket is "already in use", even though I'm destroying the socket after I receive the data.
If I remove the for loop and only read out one address, it works just fine.
If I add timeouts they won't fire, and I don't know why either.
var net = require('net');
var socket = new net.Socket();
var test = [];
test[0] = '10.5.0.5';
test[1] = '10.5.0.11';
for (var i = 0; i < test.length; i++) {
  socket.connect('2711', test[i], function() {
    socket.write('d\n');
    console.log("1");
    console.log(test[i]);
    socket.on('data', function(data) {
      data = data.toString();
      console.log(data);
      socket.destroy();
      console.log("2");
    });
  });
}
Help highly appreciated.
Thanks in advance.
The "For loop" is synchronous but ".connect" and ".on('data'" are asynchronous, therefore the for loop finished before the first connection happens. The second connection attempt happens when first one is still in progress, that is why you get connection error.
You should try a recursive function, advancing step by step. For example I create a function that receives an array, and iterate the first element of array, connect, and on data close it and call again the function if array still has more items.
var net = require('net');
var socket = new net.Socket();
var test = [];
test[0] = '10.5.0.5';
test[1] = '10.5.0.11';
function testIP(test) {
  socket.connect('2711', test.shift(), function() {
    socket.write('d\n');
    console.log("1");
    socket.on('data', function(data) {
      data = data.toString();
      console.log(data);
      socket.destroy();
      console.log("2");
      if (test.length > 0) {
        testIP(test);
      }
    });
  });
}
testIP(test);
Bonus: there are also Node.js libraries for managing sequences of asynchronous tasks like this.
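As a sketch of that idea (not from the answer above, and assuming each device accepts its own TCP connection on the same port with the same 'd\n' command), you can also give every address a fresh socket wrapped in a Promise and walk the list with async/await, so the connections never overlap and no socket is ever reused after being destroyed:

var net = require('net');

function readAddress(host) {
  return new Promise(function (resolve, reject) {
    // a new socket per host avoids the EALREADY error entirely
    var socket = net.connect(2711, host, function () {
      socket.write('d\n');
    });
    socket.on('data', function (data) {
      socket.destroy();
      resolve(data.toString());
    });
    socket.on('error', reject);
  });
}

async function readAll(hosts) {
  for (var host of hosts) {
    console.log(host, await readAddress(host)); // one connection at a time
  }
}

readAll(['10.5.0.5', '10.5.0.11']);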

socket.broadcast.to(receiver_socket_id) emitting n number of data (instead of one) to the receiver, where n is the number of users connected

I am using socket.io for private chatting. On the server side I am using
socket.broadcast.to(receiver_socket_id).emit('message', data); // where data is a json object containing text
And on the client side I catch the data using
socket.on('message', function (data) {
  alert(data.text);
});
It's working properly and shows the alert on that specific user's (socket id's) panel when only two sockets are connected (sender and receiver). But the problem appears when one more user connects to that socket: then I see two alerts, and when a total of 4 users are connected (sender + receiver + two others) I see 3 alerts. The good point is that I can see the alerts only on that specific client's panel, not on the others.
I can't understand the problem, please help.
Please have a look at it:
gyazo.com/a98d3a64a9fc6487e6ded8ccd90fd5ab
It prints the test message three times because three browsers are open.
Full code here:
Server side (I have used Redis):
var app = require('express')();
var server = require('http').Server(app);
var io = require('socket.io')(server);
var redis = require('redis');
server.listen(8080);
var usernames = {};
io.on('connection', function (socket) {
  console.log(socket.id);
  socket.on('adduser', function (userId) {
    usernames[userId] = socket.id;
  });
  var redisClient = redis.createClient();
  redisClient.subscribe('message-channel');
  redisClient.on('message', function (channel, data) {
    var jsonObj = JSON.parse(data);
    var rcvrId = jsonObj.rcvrId;
    socket.broadcast.to(usernames[rcvrId]).emit('message', data); // Not throwing error....should work
  });
  socket.on('disconnect', function () {
    console.log(socket.id + ' Disconnected');
    redisClient.quit();
  });
});
Client side:
var socket = io.connect('http://localhost:8080');
var userId = $('input[name="userId"]').val();
var rcvrId = $('input[name="rcvrId"]').val();
socket.on('connect', function () {
  // call the server-side function 'adduser' and send one parameter (value of prompt)
  socket.emit('adduser', userId);
});
socket.on('message', function (data) {
  data = jQuery.parseJSON(data);
  console.log(data);
  $("#messages").append("<div><strong>" + data.userId + " : </strong><span>" + data.message + "</span></div>");
});
You can use io.of('/').sockets[rcvrId].emit('message', data). If you are using a different namespace, just replace the / with your namespace.
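A related way to target a single socket (a sketch only, assuming socket.io 1.x or later, where every socket automatically joins a room named after its own id, and that usernames[rcvrId] still maps the receiver to its socket id) is to emit to that room from the io instance:

redisClient.on('message', function (channel, data) {
  var jsonObj = JSON.parse(data);
  var rcvrId = jsonObj.rcvrId;
  // deliver only to the receiver's own room, i.e. its socket id
  io.to(usernames[rcvrId]).emit('message', data);
});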

Listening to Node.js stream 'readable' event causing source not to be read

var stream = require('stream');
var util = require('util');
function SourceReader(source){
  stream.Readable.call(this);
  this._buffer = '';
  this._source = source;
  source.on('readable', function(){
    console.log('got here');
    this.read();
  }.bind(this));
}
util.inherits(SourceReader, stream.Readable);
SourceReader.prototype._read = function(){
  var chunk = this._source.read();
  console.log('chunk:' + chunk);
  if (chunk !== null) {
    this.push(chunk);
  }
  else
    this.push(null);
};
var fs = require('fs');
var fileStream = fs.createReadStream('my.txt'); // some data
var sourceReader = new SourceReader(fileStream);
The above code works well as expected and outputs
got here
chunk:some data
chunk:null
but when I added a listener to the event 'readable', no data gets read.
sourceReader.on('readable', function(){
  // it doesn't even get here
  var data = sourceReader.read();
  console.log(data);
});
outputs:
chunk:null
got here
It seems like SourceReader._read gets called before the source's 'readable' event has fired, and so presumably reads nothing. The source's 'readable' event then fires and calls SourceReader.read(), but this time it doesn't go into SourceReader._read, and the stream finishes.
Why is that? And how do I handle 'readable' events if the reader has a source that also fires 'readable' events?
Thanks for answering.
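One pattern that sidesteps the ordering problem, offered here only as a sketch (the wrapper name PassThroughReader is just for illustration) and not as an explanation of the behaviour above, is to implement the wrapper as a Transform and pipe the source into it, so Node drives the readable/backpressure handshake itself:

var stream = require('stream');
var util = require('util');
var fs = require('fs');

// illustrative wrapper, not from the question
function PassThroughReader(){
  stream.Transform.call(this);
}
util.inherits(PassThroughReader, stream.Transform);

PassThroughReader.prototype._transform = function(chunk, encoding, done){
  this.push(chunk); // hand each chunk straight through
  done();
};

var reader = fs.createReadStream('my.txt').pipe(new PassThroughReader());
reader.on('readable', function(){
  var data;
  while ((data = reader.read()) !== null) {
    console.log(data.toString());
  }
});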

Node.JS and Socket.io - Broadcast returns to sender

I am building a real-time web application with multiple clients and a Node.js server to handle state and broadcast event changes from clients. I have implemented Socket.IO as the transport mechanism between clients and server. When a client performs a specific action, an event is sent to the server, which broadcasts it to the other clients on the network.
However, sometimes when an array of events is received by the server, the server doesn't broadcast it but instead sends it back to the sending client - or sends it to all connected sockets including the sending client.
Here is a snippet of the server implementation, which basically listens for client events, collects them in an array, and broadcasts that array every 2 seconds.
If you want, I can include relevant snippets from the client implementation.
// Require HTTP module (to start server) and Socket.IO
var http = require('http'), io = require('socket.io'), users = require('./users');
// Commands to client
var USER_LOGIN = 0,
    USER_LIST = 1,
    USER_CONNECTED = 2,
    USER_DISCONNECTED = 3,
    COMMAND = 4,
    COMMAND_SETNICKNAME = 0,
    COMMAND_POSITION = 1,
    COMMAND_MOUSEDOWN = 2;
var array = [];
var activeUsers = {};
// Start the server at port 8080
var server = http.createServer(function(req, res){
  res.writeHead(200, { 'Content-Type': 'text/html' });
});
server.listen(8080);
// Create a Socket.IO instance, passing it our server
var io = io.listen(server);
// Add a connect listener
io.sockets.on('connection', function(client){
  console.log("started");
  // Create periodical which sends a JSON message to the client every 2 seconds
  var interval = setInterval(function() {
    if (array.length > 0) {
      console.log("Sending to clients " + array);
      //client.send(JSON.stringify(array));
      client.broadcast.send(JSON.stringify(array));
      array = [];
    }
  }, 2000);
  // Success! Now listen to messages to be received
  client.on('message', function(event){
    console.log("--Reading event " + event);
    var eventArray = JSON.parse(event);
    for (var i = 0; i < eventArray.length; i++) {
      var dataArray = eventArray[i];
      var userId = parseInt(dataArray[0]);
      var position = 1;
      console.log("current element in array " + dataArray);
      switch (parseInt(dataArray[position++])) {
        case USER_LOGIN:
          // Log in user with USERID.
          var username = dataArray[position++];
          console.log("Attempting to log in with user " + username);
          // If valid, send USER_LOGIN broadcast to clients
          if (login(username)) {
            array.push([0, USER_LOGIN, login(username)]);
          }
          break;
io.sockets.on('connection', function(client){
  console.log("started");
  // Create periodical which sends a JSON message to the client every 2 seconds
  var interval = setInterval(function() {
    if (array.length > 0) {
      console.log("Sending to clients " + array);
      //client.send(JSON.stringify(array));
      client.broadcast.send(JSON.stringify(array));
      array = [];
    }
  }, 2000);
You are creating one scheduler for each connection, which is going to be a mess, because you create the interval inside on('connection', ...).
I also don't see the point of emitting to clients on an interval, because you can push changes in response to a client change event; that is the advantage of socket.io. Instead of using an interval you can just broadcast the change in another listener for the client change event, like this:
client.on('onSomechangeInClient', function(event){
  // update array here..
  client.broadcast.send(JSON.stringify(array));
});
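If you do keep a per-connection interval for some reason, it should at least be cleared when that client disconnects, otherwise every closed connection leaks a running timer. A minimal sketch, assuming the interval variable from the snippet above:

client.on('disconnect', function () {
  clearInterval(interval); // stop this connection's scheduler when the client goes away
});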

Resources