nodejs net.createServer large amount of data coming in - node.js

I have Node.js listening on a TCP port and receiving content from a Flash XMLSocket. If I try to push a lot of data in one message from Flash (XMLSocket.send(long_message)), the stream.on("data", function(d) { ... }) event fires with partial data, while I want it to fire once the entire message has been transferred.
Flash's XMLSocket transfers data as a UTF-8 encoded string terminated with a null byte.
How can I make sure I only ever process complete messages?
UPDATE
I've found a similar question here, but there is no clear answer. I know the end of my message should be a null byte, but could you please give me an example of how to store an incomplete message and avoid overlapping with the next/concurrent message?
UPDATE2
After maerics's answer, I've done something like this:
var net = require("net");

var server = net.createServer(function (stream) {
  var dataBlock = ""; // holds the trailing, incomplete part of a message

  stream.setEncoding("utf8"); // read strings rather than Buffers

  stream.on("data", function (d) {
    processChunk(d);
  });

  function processChunk(data) {
    var chunks = data.split("\0");
    // every element except the last one completes a message
    while (chunks.length > 1) {
      if (dataBlock.length > 0) {
        dataBlock += chunks.shift();
        processIncomingMessage(dataBlock);
        dataBlock = "";
      } else {
        processIncomingMessage(chunks.shift());
      }
    }
    // keep the (possibly empty) incomplete remainder for the next 'data' event
    dataBlock += chunks.shift();
  }
});

Here's what I would do (tested):
var net = require('net');
var server = net.createServer(function (conn) {
  var msg = ''; // Current message, per connection.
  conn.setEncoding('utf8');
  conn.on('message', function (m) {
    console.log('MESSAGE: ' + m);
  });
  conn.on('data', function (data) {
    msg += data.toString('utf8');
    if (msg.charCodeAt(msg.length - 1) === 0) {
      conn.emit('message', msg.substring(0, msg.length - 1));
      msg = '';
    }
  });
});
Note that multiple null-separated messages could arrive in a single data chunk, so you should expand this example to split the data on null characters and process each piece separately. Also, you might want to process the final, potentially incomplete message on the connection's 'end' event.
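For illustration, here is one way that expansion might look (a minimal sketch, not the tested code above; the port number is an arbitrary placeholder):

var net = require('net');

var server = net.createServer(function (conn) {
  var pending = ''; // incomplete trailing message, per connection
  conn.setEncoding('utf8');

  conn.on('message', function (m) {
    console.log('MESSAGE: ' + m);
  });

  conn.on('data', function (data) {
    var parts = (pending + data).split('\0');
    pending = parts.pop(); // last part is incomplete ('' if the chunk ended on a null byte)
    parts.forEach(function (m) {
      conn.emit('message', m);
    });
  });

  conn.on('end', function () {
    if (pending.length > 0) conn.emit('message', pending); // flush any leftover data
  });
});

server.listen(8124); // placeholder port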

Related

Replay a log file with NodeJS as if it were happening in real-time

I have a log file with about 14,000 aircraft position data points captured from a system called Flarm; it looks like this:
{"addr":"A","time":1531919658.578100,"dist":902.98,"alt":385,"vs":-8}
{"addr":"A","time":1531919658.987861,"dist":914.47,"alt":384,"vs":-7}
{"addr":"A","time":1531919660.217471,"dist":925.26,"alt":383,"vs":-7}
{"addr":"A","time":1531919660.623466,"dist":925.26,"alt":383,"vs":-7}
What I need to do is find a way to 'play' this file back in real time (as if it were occurring right now, even though it's pre-recorded), and emit an event whenever a log entry 'occurs'. The file is not being added to; it's pre-recorded, and the playback would happen at a later stage.
The reason for doing this is that I don't have access to the receiving equipment when I'm developing.
The only way I can think of is to set a timeout for every log entry, but that doesn't seem like the right way to do it. Also, this process would have to scale to longer recordings (this one was only an hour long).
Are there other ways of doing this?
If you want to "play them back" with the actual time difference, a setTimeout is pretty much what you have to do.
const processEntry = (entry, index) => {
  index++;
  const nextEntry = getEntry(index);
  if (nextEntry == null) return;
  // times in the log are in seconds; setTimeout expects milliseconds
  const timeDiff = (nextEntry.time - entry.time) * 1000;
  emitEntryEvent(entry);
  setTimeout(processEntry, timeDiff, nextEntry, index);
};

processEntry(getEntry(0), 0);
This emits the current entry and then sets a timeout based on the time difference to the next entry.
getEntry could either fetch lines from a prefilled array or fetch them individually based on the index; in the latter case, only two lines of data would be in memory at the same time.
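For illustration only, since getEntry and emitEntryEvent are left undefined above, here is one hypothetical way to back them with a prefilled array parsed from the log file (the file path and event name are placeholders):

const fs = require('fs');
const EventEmitter = require('events');

const emitter = new EventEmitter();

// parse the whole log into an array of entries up front
const entries = fs.readFileSync('./data/flarm.log', 'utf8')
  .split('\n')
  .filter(line => line.trim().length > 0)
  .map(line => JSON.parse(line));

const getEntry = (index) => entries[index] || null;
const emitEntryEvent = (entry) => emitter.emit('entry', entry);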
Got it working in the end! setTimeout turned out to be the answer, and combined with the input from Lucas S., this is what I ended up with:
const EventEmitter = require('events');
const fs = require('fs');

const readable = fs.createReadStream("./data/2018-07-18_1509log.json", {
  encoding: 'utf8',
  fd: null
});

function read_next_line() {
  var chunk;
  var line = '';
  // While this is a thing we can do, assign chunk
  while ((chunk = readable.read(1)) !== null) {
    // If chunk is a newline character, return the line
    if (chunk === '\n') {
      return JSON.parse(line);
    } else {
      line += chunk;
    }
  }
  return false;
}

var lines = [];
var nextline;

const processEntry = () => {
  // If lines is empty, read a line
  if (lines.length === 0) lines.push(read_next_line());
  // Quit here if we've reached the last line
  if ((nextline = read_next_line()) == false) return true;
  // Else push the just read line into our array
  lines.push(nextline);
  // Get the time difference in milliseconds
  var delay = Number(lines[1].time - lines[0].time) * 1000;
  // Remove the first line
  lines.shift();
  module.exports.emit('data', lines[0]);
  // Repeat after the calculated delay
  setTimeout(processEntry, delay);
};

var ready_to_start = false;

// When the stream becomes readable, allow starting
readable.on('readable', function () {
  ready_to_start = true;
});

module.exports = new EventEmitter();
module.exports.start = function () {
  if (!ready_to_start) return false;
  processEntry();
};
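For context, a hypothetical consumer of this module might look like the sketch below (the file name ./player.js and the small start-up delay are assumptions, not part of the code above):

// player.js is assumed to contain the module above
const player = require('./player');

player.on('data', (entry) => {
  console.log('Aircraft position:', entry);
});

// give the read stream a moment to become readable before starting playback
setTimeout(() => player.start(), 100);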
Assuming you want to visualize the flight logs, you can use fs.watch as below to watch the log file for changes:
fs.watch('somefile', function (event, filename) {
  console.log('event is: ' + event);
  if (filename) {
    console.log('filename provided: ' + filename);
  } else {
    console.log('filename not provided');
  }
});
The code excerpt is from here; for more information on fs.watch(), check out here.
Then, for seamless updates on the frontend, you can set up a WebSocket to your server: watch the log file there and send each newly added row to the frontend over that socket.
Once you get the data on the frontend, you can visualize it there. While I haven't done a flight visualization project before, I've used D3.js to visualize other things (sound, numerical data, metric analysis, etc.) a couple of times, and it did the job every time.
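As a rough sketch of that idea, using the third-party ws package as one possible WebSocket implementation (the file name, port, and tail-reading logic here are illustrative assumptions, not part of the answer above):

const fs = require('fs');
const WebSocket = require('ws');

const wss = new WebSocket.Server({ port: 8080 }); // placeholder port
let lastSize = fs.statSync('somefile').size;

fs.watch('somefile', function (event) {
  if (event !== 'change') return;
  const stats = fs.statSync('somefile');
  if (stats.size <= lastSize) return;
  // read only the newly appended bytes and broadcast them to every connected client
  const tail = fs.createReadStream('somefile', { start: lastSize, end: stats.size - 1, encoding: 'utf8' });
  lastSize = stats.size;
  tail.on('data', function (chunk) {
    wss.clients.forEach(function (client) {
      if (client.readyState === WebSocket.OPEN) client.send(chunk);
    });
  });
});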

Node js Buffers for incoming data

I am wondering if it makes sense to use Node's Buffer for incoming client data to a server. My server and clients are TCP based and I am using <EOF> to determine the end of a message. The message is always stringified JSON.
eg: {"name":"Bob"}<EOF>
In case the entire message does not come through, should I be using Node Buffer to build up the data, or a regular string?
If it is a buffer, I don't think I understand how to correctly build one up. This is my Client constructor that is created each time a socket connects to the Node server.
constructor(socket) {
  var self = this;

  // properties
  this.socket = socket;
  this.buffer = Buffer.alloc(1024);
  this.dataEnd = '<EOF>';

  // append <EOF> to every msg
  this.socket.write = function (msg) {
    msg += "<EOF>";
    return Socket.prototype.write.call(this, msg);
  };

  // build up buffer
  this.socket.on('data', function (data) {
    var buffLen = self.buffer.length;
    var dataBuffer = Buffer.from(data);
    if (buffLen + dataBuffer.length < 1024) {
      if (data.indexOf(self.dataEnd) === -1) {
        self.buffer.concat(dataBuffer);
      }
    }
  });

  return this;
}
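For comparison, here is a minimal sketch of the plain-string approach the question asks about: accumulate incoming chunks into one string and split on the <EOF> delimiter (this is illustrative only, not the poster's code; the class and method names are made up):

class Client {
  constructor(socket) {
    this.socket = socket;
    this.dataEnd = '<EOF>';
    this.pending = ''; // accumulates data until a full <EOF>-terminated message arrives

    socket.setEncoding('utf8');
    socket.on('data', (data) => {
      this.pending += data;
      let idx;
      // there may be zero, one, or several complete messages waiting
      while ((idx = this.pending.indexOf(this.dataEnd)) !== -1) {
        const message = this.pending.slice(0, idx);
        this.pending = this.pending.slice(idx + this.dataEnd.length);
        this.handleMessage(JSON.parse(message));
      }
    });
  }

  handleMessage(obj) {
    console.log('Received:', obj); // e.g. { name: 'Bob' }
  }
}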

nodejs weird if case

I'm totally new to Node.js and I have been trying to get an Arduino and a Node app to communicate. My Node code listens to the serial port and sends data to a page. Everything is OK, but a weird case has occurred.
var cleanData = ''; // this stores the clean data
var readData = '';  // this stores the buffer

sp.on("open", function () {
  console.log('open serial communication');
});

// Listens to incoming data
sp.on('data', function (data) { // callback when data is received
  readData += data.toString(); // append data to buffer
  // If the buffer contains a 'B', keep everything after it
  // as clean data, then clear the buffer.
  if (readData.indexOf('B') >= 0) {
    cleanData = readData.substring(readData.indexOf('B') + 1, readData.length);
    console.log(cleanData);
    readData = '';
    //io.sockets.emit('pulse', cleanData);
  }
});
Data starting with 'B' is supposed to be printed, but when I run the code I see things like
226
Q26
226
Q252
207
Q498
Why does data with 'Q' get printed?
You could split on the line feed too, something like this:
sp.on('data', function (data) { // Callback when data is received
  readData += data.toString(); // Append data to buffer
  while (readData.indexOf('\n') >= 0) { // Process all full rows
    // Get everything before the line feed
    var cleanData = readData.substring(0, readData.indexOf('\n'));
    // ...and remove what we just extracted from the read buffer
    readData = readData.substring(readData.indexOf('\n') + 1, readData.length);
    // If it contains a 'B' (even at the very start), well... you know what to do
    if (cleanData.indexOf('B') >= 0) {
      console.log(cleanData);
      //io.sockets.emit('pulse', cleanData);
    }
  }
});

node.js simple tcp chat server

I am trying to build a simple tcp chat server, WITHOUT socket.io.
Now, I have no problem broadcasting data across all sockets connected to the server.
My problem is assigning a socket identifier to each connection and retrieving them from an object.
Here is the code:
var net = require('net');

// keep track of sockets
var allSockets = {
  sockets: {},
  addSocket: function (socket, nick, table) {
    this.sockets[table + nick] = socket;
  },
  removeSocket: function (nick, table) {
    if (this.sockets[table + nick] !== undefined) {
      this.sockets[table + nick] = null;
      delete this.sockets[table + nick];
    }
  }
};

// create the server
var server = net.createServer(function (socket) {
  var connected = false;
  var jsoncommand = true;
  // first data sent MUST BE a JSON formatted string in this format:
  // {"nick":"someid","table":"tablenumber"}
  var thisnick = "";
  var thistable = "";

  // get client ip
  socket.name = socket.remoteAddress;

  // write something on each connect
  socket.write("You are connecting from " + socket.name + "\n");
  socket.write(socket.name + " joined chat\n");

  // handle data streams
  socket.on('data', function (data) {
    if (jsoncommand) {
      // JSON.parse the first data stream
      var some = JSON.parse(data);
      // assign a socket id based on nick and table
      allSockets.addSocket(socket, some.table, some.nick);
      socket.write(some.nick + " joined " + some.table + "\n");
      thisnick = some.nick;
      thistable = some.table;
      connected = true;
      // no longer waiting for first stream as JSON
      jsoncommand = false;
    } else if (connected) {
      // write whatever data it receives (function is below)
      broadcast(data, thistable);
    } else {
      socket.write("You are not connected to any table");
      socket.destroy();
      connected = false;
      jsoncommand = true;
      thisnick = "";
      thistable = "";
    }
  });

  // remove the socket from allSockets but broadcast
  // only to other users on the same table
  socket.on('end', function () {
    allSockets.removeSocket(thisnick, thistable);
    broadcast(thisnick + " has left table " + thistable, thistable);
  });

  // this function should select from the allSockets object
  // only those whose property matches "table"
  // and write to those sockets only, when called
  function broadcast(message, table) {
    allSockets.sockets.forEach(function (socket) {
      if (socket.hasOwnProperty(table)) {
        socket.write(message);
      }
    });
  }
});

server.listen(8000);
console.log("running at port 8000\n");
Just deploy this on your machine and connect with nc to port 8000,
and make sure the first thing you send it is something like
{"nick":"mynick","table":"mytable"}
You will see a message that your nick joined your table.
Now if you send it something else, since it has stored your table name,
it should echo whatever you send to you and to the other connections with different
nicks on the same table. Instead the server dies, throwing an error that the allSockets
object does not have a "for" or "forEach" method, or indexOf, or any other.
So, how do I correct this?
If my nick is "john" and I joined "my_table", and "mary", "lisa" and "ana" also joined the same "my_table", assuming I don't know their nicks but I do know they are on "my_table",
HOW do I select from the allSockets object those sockets that belong to "my_table"?
I tried hasOwnProperty, but that returns a boolean, which only tells me that there are sockets with that property; how do I put them in a for or forEach loop in order to write to them?
I know it may be a silly question, or maybe I'm not even approaching this correctly, but I'm a Node.js beginner, so any advice is greatly appreciated.
By the way, I put this together from examples across the web.
As for the JSON string, it's the first thing sent by a desktop app on connection. I chose it for testing purposes, so don't bother with it.
I suppose the error about forEach happens here:
allSockets.sockets.forEach(function(socket) {
allSockets.sockets is not an Array; it's a plain object (a key/value map, like a hash table).
So to loop through each socket in it, you should change the loop to:
for (var key in allSockets.sockets) {
  var socket = allSockets.sockets[key];
  // your logic here
}
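Building on that, here is a possible sketch of a broadcast that only writes to sockets on the given table, assuming the keys really are built as table + nick the way addSocket builds them (untested, just to illustrate the loop):

function broadcast(message, table) {
  for (var key in allSockets.sockets) {
    // keys were stored as table + nick, so sockets on this table share the table prefix
    if (key.indexOf(table) === 0) {
      allSockets.sockets[key].write(message);
    }
  }
}

Prefix matching like this can collide (for example "my_table" vs "my_table2"), so a more robust design would store the nick and table alongside each socket, or nest the map by table.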

Writing data to a socket in Node

I'm getting a weird result when writing to a socket. I wrote a simple experiment with a client and a server:
server.js
var net = require('net');

net.createServer(function (connection) {
  connection.on('data', function (data) {
    console.log('data: ' + data);
  });
}).listen(1337);
client.js
var net = require('net');

var client = net.connect({ port: 1337 }, function () {
  var i = 0;
  function send() {
    client.write('a');
    if (++i < 100) {
      process.nextTick(send);
    } else {
      client.end();
    }
  }
  send();
});
I expected the server to show 100 lines of data: a, but I ended up getting a smaller number of data: aaaaaaa lines. There's socket.setNoDelay(), which seems to be what I want, but it doesn't seem to have any effect.
What am I missing?
Thanks a lot,
TCP only sends exactly the bytes you write to the socket; it will not separate them into messages, that's up to you. If you would like to get 100 lines of a, then you have to define 100 separate messages and choose a delimiter for them. Usually people delimit messages sent over a TCP socket with \r\n.
So you would need to change your server to:
var net = require('net');

net.createServer(function (connection) {
  connection.on('data', function (buffer) {
    var data = buffer.toString();
    if (data.indexOf('\r\n') > -1) { // If there's more than one line in the buffer
      var lines = data.split('\r\n'); // Split the lines
      var i = lines.length;
      while (i--) { // This will read your lines in reverse, be careful
        console.log(lines[i]); // Print each line
      }
    } else {
      console.log(data); // If only one line came through, print it
    }
  });
}).listen(1337);
And your client to
var net = require('net');

var client = net.connect({ port: 1337 }, function () {
  var i = 0;
  function send() {
    client.write('a\r\n'); // Notice the \r\n part. This is what will help you separate messages on the server
    if (++i < 100) {
      process.nextTick(send);
    } else {
      client.end();
    }
  }
  send();
});
And then I believe you would get 100 lines of a.
This module also provides a very interesting way to do it, and of course ZeroMQ would also shine in this because it already has a nice protocol that puts things in envelopes and sends them.
Also interesting, though outside the scope of your question: the writes you make on one side will not arrive as one 'data' event per write on the server, and with the server above the output can even appear out of order. If you change your send function to
function send() {
  if (++i < 100) {
    client.write('a' + i + '\r\n');
    process.nextTick(send);
  } else {
    client.end();
  }
}
you may see the output printed in a different order than you sent it (remember that the server above prints each chunk's lines in reverse).
By "The TCP protocol only sends exactly the bytes you write in the socket" I mean that if you do socket.write("1"); socket.write("2"), you will receive "12" on the server, because that's what you wrote on the socket. You have to explicitly separate your messages by something so that the server can know when a message starts and when a message ends.
About receiving things in order or not, you'll notice that if you remove the process.nextTick and write your client like this:
var net = require('net');

var client = net.connect({ port: 1337 }, function () {
  var i = 100;
  while (i--) {
    client.write('a' + i + '\r\n');
  }
});
you'll likely get the data in two 'data' events on the server (at least I did): first numbers 83-99 and then 0-82, despite having written them in one definite order.
That's because TCP splits the byte stream into chunks; the bytes still arrive in the order they were written, but the server above prints each chunk's lines in reverse, which is why the output looks shuffled. You can read more about how TCP works on its Wikipedia page, of course, and this video probably says more than you need to hear, but it's good to understand everything you're working with.
