I am able to send a Startup Message to the PostgreSQL server I have running and get a response from that server. I get ParameterStatus messages. The problem is I never get any type of Authentication message. My question is this: Why is it that the server never sends any type of Authentication message back to me?
Below I will show my code for the startup part of the protocol, a couple of lines of its debugging output (so that hopefully you won't even have to read the code), the information from the PostgreSQL documentation that I think is relevant to my question, and another resource I have found useful for visualizing the protocol.
This is my code:
var net = require('net');
var BlueBird = require('bluebird');
var Buffer = require('buffer').Buffer;
var createStartupMessage = function(user_name, database_name){
var buffer_size = 22 + user_name.length + 1 + database_name.length + 1 + 1;
var StartUpMessage = new Buffer(buffer_size);
var position_in_buffer = 0;
StartUpMessage.writeUInt32BE(buffer_size, 0);
position_in_buffer += 4;
StartUpMessage.writeUInt32BE(196608, position_in_buffer); //version 3.0
position_in_buffer += 4;
position_in_buffer = addMessageSegment(StartUpMessage, "user", position_in_buffer);
position_in_buffer = addMessageSegment(StartUpMessage, user_name, position_in_buffer);
position_in_buffer = addMessageSegment(StartUpMessage, "database", position_in_buffer);
position_in_buffer = addMessageSegment(StartUpMessage, database_name, position_in_buffer);
//Add the last null terminator to the buffer
addNullTerminatorToMessageSegment(StartUpMessage, position_in_buffer);
console.log("The StartUpMessage looks like this in Hexcode: " + StartUpMessage.toString('hex'));
console.log("The length of the StartupMessage in Hexcode is: " + StartUpMessage.toString('hex').length);
return StartUpMessage;
};
var addMessageSegment = function(StartUpMessage, message_segment, position_in_buffer){
var bytes_in_message_segment = Buffer.byteLength(message_segment);
StartUpMessage.write(message_segment, position_in_buffer, StartUpMessage.length - position_in_buffer, 'utf8');
position_in_buffer = position_in_buffer + bytes_in_message_segment;
position_in_buffer = addNullTerminatorToMessageSegment(StartUpMessage, position_in_buffer);
return position_in_buffer;
};
var addNullTerminatorToMessageSegment = function(StartUpMessage, position_in_buffer){
StartUpMessage.writeUInt8(0, position_in_buffer);
position_in_buffer = position_in_buffer + 1;
return position_in_buffer;
};
//Here is where everything starts. The functions above are called within this BlueBird Promise.
BlueBird.coroutine(function* () {
var host = "127.0.0.1";
var port = "5432";
var idle_timeout = 10000;
var MySocket = new net.Socket();
MySocket.setTimeout(idle_timeout);
var StartUpMessage = createStartupMessage("testusertwo", "testdatabasetwo");
var data = yield new Promise(
function resolver(resolve, reject) {
var number_of_responses = 0;
var number_of_responses_to_wait_for = 2;
MySocket.on('connect', function () {
var message = StartUpMessage.toString("utf8");
var flushed = MySocket.write(message, "utf8");
console.log("Message flushed to kernel: " + flushed);
});
MySocket.on('data', function (data) {
console.log("The response from the server is: " + data.toString('utf8'));
console.log("----This Line Divides the Response Below from the Response Above----");
if( number_of_responses !== number_of_responses_to_wait_for){
number_of_responses += 1;
} else {
resolve(data);
}
});
MySocket.on('error', function (error) {
reject(error);
});
MySocket.connect(port, host);
}
);
return data;
})()
.then(function (data) {
return data;
})
.catch(function (error) {
console.error(error);
});
These are a couple of lines of my code's debugging output. It shows the hex representation of the initial UTF-8 encoded message I send to the server (the startup message format is shown on slide 9 of the link at the bottom), followed by the server's response.
After this, my program hangs where I am waiting to see the server send an Authentication class of message. In the startup message I have bolded the first two 32-bit big-endian integers and all the null terminators for convenience. Also, the ? marks at the end (in ?M2\??ZI) are really the diamond-shaped UTF-8 replacement characters, and this ending part changes on every run as well; I do not know why.
Some output from my code:
The StartUpMessage looks like this in Hexcode:
**0000003300030000**75736572**00**746573747573657274776f**00**6461746162617365**00**74657374646174616261736574776f**0000**
The response from the server is:
Sapplication_nameSclient_encodingUTF8SDateStyleISO, MDYSinteger_datetimesonSntervalStylepostgresSis_superuseroffSserver_encodingUTF8Sserver_version9.5.0S&session_authorizationtestusertwoS#standard_conforming_stringsonSTimeZoneUS/EasternK
?M2\??ZI
This is what I think is the relevant information from the PostgreSQL documentation:
50.2.Message Flow.1.Start-up:
To begin a session, a frontend opens a connection to the server and sends a startup message.
The authentication cycle ends with the server either rejecting the connection attempt (ErrorResponse), or sending AuthenticationOk.
This section says some other things as well that make it sound like I should get either one of the many Authentication messages listed (such as AuthenticationCleartextPassword), or AuthenticationOk if a password is not needed and everything happens without an error. If there is an error, then I should get an ErrorResponse message.
50.5.Message Formats:
In this section it is indicated that if the first Byte in the server response is ’S’, then the Message is classified as a ParameterStatus message.
In this section it also indicates that if the first Byte in the server response is ‘R’, then the Message is classified as an Authentication message.
The useful resource I found:
I think this is a very good resource for visualizing the message flow protocol. The author's name is Jan Urbański. On slide 9, the startup packet is shown. The only thing I've found (with Node.js, anyway) is that there needs to be another null terminator box before the "..." box.
https://www.pgcon.org/2014/schedule/attachments/330_postgres-for-the-wire.pdf
After looking on Wireshark, I realized that I was getting an Authentication message (an 'R' type message) all along. The problem was that I was parsing the data from the server incorrectly: I immediately converted it to a UTF-8 string. The data needs to be parsed according to the message formats before any of it is converted to UTF-8, because the formats are not just a bunch of chars strung together; they also include 32-bit and 16-bit big-endian integers.
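For anyone hitting the same wall, here is a minimal sketch of the kind of parsing I mean, based on the Message Formats section of the documentation; the helper name and the usage comment are my own, not from any library. Every backend message is one type byte, then a 32-bit big-endian length (which counts itself plus the payload), then the payload.
var parseBackendMessages = function (buffer) {
    var messages = [];
    var offset = 0;
    while (offset + 5 <= buffer.length) {
        var type = String.fromCharCode(buffer[offset]);      // e.g. 'R', 'S', 'K', 'Z'
        var length = buffer.readUInt32BE(offset + 1);        // payload length + 4
        if (offset + 1 + length > buffer.length) break;      // message continues in the next chunk
        messages.push({ type: type, payload: buffer.slice(offset + 5, offset + 1 + length) });
        offset += 1 + length;
    }
    return messages;
};
// Used inside the 'data' handler instead of data.toString('utf8'):
// parseBackendMessages(data).forEach(function (m) {
//     if (m.type === 'R') console.log('Authentication, code:', m.payload.readUInt32BE(0));
//     if (m.type === 'S') console.log('ParameterStatus:', m.payload.toString('utf8'));
// });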
Related
I have a log file with about 14,000 aircraft position data points captured from a system called Flarm. It looks like this:
{"addr":"A","time":1531919658.578100,"dist":902.98,"alt":385,"vs":-8}
{"addr":"A","time":1531919658.987861,"dist":914.47,"alt":384,"vs":-7}
{"addr":"A","time":1531919660.217471,"dist":925.26,"alt":383,"vs":-7}
{"addr":"A","time":1531919660.623466,"dist":925.26,"alt":383,"vs":-7}
What I need to do is find a way to 'play' this file back in real time (as if it were occurring right now, even though it is pre-recorded) and emit an event whenever a log entry 'occurs'. The file is not being added to; it is pre-recorded, and the playback would happen at a later stage.
The reason for doing this is that I don't have access to the receiving equipment when I'm developing.
The only way I can think of doing it is to set a timeout for every log entry, but that doesn't seem like the right way to do it. Also, this process would have to scale to longer recordings (this one was only an hour long).
Are there other ways of doing this?
If you want to "play them back" with the actual time difference, a setTimeout is pretty much what you have to do.
const processEntry = (entry, index) => {
index++;
const nextEntry = getEntry(index);
if (nextEntry == null) return;
const timeDiff = (nextEntry.time - entry.time) * 1000; // entry times are in seconds, setTimeout expects milliseconds
emitEntryEvent(entry);
setTimeout(processEntry, timeDiff, nextEntry, index);
};
processEntry(getEntry(0), 0);
This emits the current entry and then sets a timeout based on the difference until the next entry.
getEntry could either fetch lines from a prefilled array or fetch lines individually based on the index. In the latter case, only two lines of data would be in memory at the same time.
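For example, getEntry could simply index into an array that is filled once at startup. A minimal sketch, with a placeholder file path:
const fs = require('fs');
// Parse the whole log into memory once; each line is one JSON entry.
const entries = fs.readFileSync('./flarm.log', 'utf8')
    .split('\n')
    .filter(line => line.trim().length > 0)
    .map(line => JSON.parse(line));
const getEntry = (index) => entries[index]; // undefined past the last line, which ends the playback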
Got it working in the end! setTimeout turned out to be the answer, and combined with the input of Lucas S. this is what I ended up with:
const EventEmitter = require('events');
const fs = require('fs');
const readable = fs.createReadStream("./data/2018-07-18_1509log.json", {
encoding: 'utf8',
fd: null
});
function read_next_line() {
var chunk;
var line = '';
// Read one character at a time while the stream still has buffered data
while ((chunk = readable.read(1)) !== null) {
// If chunk is a newline character, return the line
if (chunk === '\n'){
return JSON.parse(line);
} else {
line += chunk;
}
}
return false;
}
var lines = [];
var nextline;
const processEntry = () => {
// If lines is empty, read a line
if (lines.length === 0) lines.push(read_next_line());
// Quit here if we've reached the last line
if ((nextline = read_next_line()) == false) return true;
// Else push the just read line into our array
lines.push(nextline);
// Get the time difference in milliseconds
var delay = Number(lines[1].time - lines[0].time) * 1000;
// Emit the earlier of the two lines now, then drop it
module.exports.emit('data', lines[0]);
lines.shift();
// Repeat after the calculated delay
setTimeout(processEntry, delay);
}
var ready_to_start = false;
// When the stream becomes readable, allow starting
readable.on('readable', function() {
ready_to_start = true;
});
module.exports = new EventEmitter;
module.exports.start = function() {
if (!ready_to_start) return false;
processEntry();
}
Assuming you want to visualize the flight logs, you can use fs.watch as shown below to watch the log file for changes:
fs.watch('somefile', function (event, filename) {
console.log('event is: ' + event);
if (filename) {
console.log('filename provided: ' + filename);
} else {
console.log('filename not provided');
}
});
The code excerpt is from here; for more information on fs.watch(), check out here.
Then, for seamless updates on the frontend, you can set up a WebSocket connection to your server, watch the log file there, and send each newly added row over that socket to the frontend.
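A rough sketch of that combination, assuming the widely used ws package for the WebSocket side; the port, file path, and the append-only-file assumption are placeholders, not requirements:
var fs = require('fs');
var WebSocket = require('ws'); // npm install ws

var wss = new WebSocket.Server({ port: 8080 });
var logFile = './flightlog.json';
var lastSize = fs.statSync(logFile).size;

fs.watch(logFile, function () {
    var newSize = fs.statSync(logFile).size;
    if (newSize <= lastSize) return; // nothing appended yet
    // Read only the newly appended bytes and push them to every connected client.
    var stream = fs.createReadStream(logFile, { start: lastSize, end: newSize - 1, encoding: 'utf8' });
    stream.on('data', function (chunk) {
        wss.clients.forEach(function (client) {
            if (client.readyState === WebSocket.OPEN) client.send(chunk);
        });
    });
    lastSize = newSize;
});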
Once you have the data on the frontend, you can visualize it there. While I haven't done a flight-visualization project before, I've used D3.js a couple of times to visualize other things (sound, numerical data, metric analysis, etc.) and it did the job every time.
I am wondering if it makes sense to use Node's Buffer for incoming client data to a server. My server and clients are TCP based and I am using <EOF> to determine the end of a message. The message is always stringified JSON.
eg: {"name":"Bob"}<EOF>
In case the entire message does not come through, should I be using Node Buffer to build up the data, or a regular string?
If a Buffer is the right choice, I don't think I understand how to build one up correctly. This is my Client constructor, which is created each time a socket connects to the Node server.
constructor(socket){
var self = this;
// properties
this.socket = socket;
this.buffer = Buffer.alloc(1024);
this.dataEnd = '<EOF>';
// append <EOF> to every msg
this.socket.write = function(msg){
msg += "<EOF>";
return Socket.prototype.write.call(this, msg);
};
// build up buffer
this.socket.on('data', function(data){
var buffLen = self.buffer.length;
var dataBuffer = Buffer.from(data);
if(buffLen + dataBuffer.length < 1024){
if(data.indexOf(self.dataEnd) === -1){
self.buffer.concat(dataBuffer);
}
}
});
return this;
}
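For reference, this is a rough sketch of the string-based alternative I am weighing up; onFramedMessages is just a name made up for this sketch, and the port in the usage comment is arbitrary:
var net = require('net');
var DATA_END = '<EOF>';

// Accumulate incoming chunks per connection and invoke the callback once per
// complete "<json><EOF>" message, however the chunks happen to arrive.
function onFramedMessages(socket, handleMessage) {
    var pending = '';
    socket.on('data', function (data) {
        pending += data.toString('utf8');
        var idx;
        // One chunk may carry zero, one, or several complete messages.
        while ((idx = pending.indexOf(DATA_END)) !== -1) {
            var raw = pending.slice(0, idx);
            pending = pending.slice(idx + DATA_END.length);
            handleMessage(JSON.parse(raw));
        }
    });
}

// Usage:
// net.createServer(function (socket) {
//     onFramedMessages(socket, function (msg) { console.log('got', msg); });
// }).listen(4000);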
I'm trying to set up communication between a Node.js server and a Red Pitaya card.
The client connects to the web server and uses a form to choose some settings; these are sent to a Red Pitaya, which sends back a big string of numbers.
Then the web server writes them to a downloadable file.
It only works intermittently.
here is the server :
var settings = require('./settings_model');
var fs = require('fs');
var net = require('net');
var msg = [];
module.exports = function(app) {
net.createServer(function (socket) {
socket.on('data', function (data) {
/* test console */
var msg = JSON.stringify(data);
console.log("data received :" + msg);
tab = new Array();
for(i = 0; i < nbPix; i++){
tab[i] = new Array();
fs.appendFile('./public/img.txt', '\n');
for(var j=0 ; j < len; j++){
tab[i][j]= data[i+(j*nbPix)];
if(!isNaN(tab[i][j])){
fs.appendFile('./public/img.txt', tab[i][j]+ " ");
};
};
};
});
app.post('/api/sendSettings', function(req, res) {
// creation of the file
fs.writeFile("./public/img.txt", "", function(err){
if(err){console.log(err)};
console.log("the file was saved");
});
// here we send settings to the red pitaya
socket.write(input);
res.end();
});
}).listen(9000);
};
For tiny values it sometimes works, and I can see the log:
data received :{"type":"Buffer","data":[1,1,....2]}
But for the same values, the data can arrive split into pieces, which breaks my 2D array:
data receive :{"type":"Buffer","data":[1,1,....1]}
data receive :{"type":"Buffer","data":[2,2,....2]}
And if the data received is too big, the system crashes with:
Error : EMFILE: too many open files, open './public/img.txt'
Is there a way to set the size of the received stream? I think I need all of the data before I can build my 2D array.
For the EMFILE error, I've tried changing settings like the open-file limit, but it is still not working. It shouldn't be trying to open more than one file anyway, should it?
I'm new to this kind of project, so any hints or documentation links would be gladly accepted!
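To make the goal clearer, here is a rough sketch of what I imagine the receiving side should do: collect all the chunks for one acquisition and write the file once, instead of calling appendFile once per value. The 'end' event and the one-value-per-byte layout are assumptions on my part, not tested against the card:
var net = require('net');
var fs = require('fs');

net.createServer(function (socket) {
    var chunks = [];

    // TCP has no message boundaries, so just collect every chunk as it arrives.
    socket.on('data', function (data) {
        chunks.push(data);
    });

    // Assumes the card ends (half-closes) the connection when it is done sending.
    socket.on('end', function () {
        var all = Buffer.concat(chunks);              // the complete payload, in arrival order
        var values = Array.prototype.slice.call(all); // one numeric value per byte
        // A single write avoids thousands of concurrent appendFile calls (the EMFILE error).
        fs.writeFile('./public/img.txt', values.join(' '), function (err) {
            if (err) console.log(err);
        });
    });
}).listen(9000);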
I'm getting a weird result when writing to a socket. I wrote a simple experiment with a client and a server:
server.js
var net = require('net');
net.createServer(function (connection) {
connection.on('data', function (data) {
console.log('data: ' + data);
});
}).listen(1337);
client.js
var net = require('net');
var client = net.connect({port: 1337}, function () {
var i = 0;
function send() {
client.write('a');
if (++i < 100) {
process.nextTick(send);
} else {
client.end();
}
}
send();
});
I expected the server to show 100 lines of data: a, but I ended up getting a smaller number of data: aaaaaaa lines. There's socket.setNoDelay() that seems to be what I want, but it doesn't seem to have any effect.
What am I missing?
Thanks a lot,
The TCP protocol only sends exactly the bytes you write in the socket. They will not be separated into messages, that's up to you. If you would like to get 100 lines of a then you would have to define 100 separate messages, and choose a delimiter for them. Usually people delimit messages sent to a TCP socket by \r\n.
So you would need to change your server to
var net = require('net');
net.createServer(function (connection) {
connection.on('data', function (buffer) {
var data = buffer.toString();
if (data.indexOf('\r\n') > -1) { // If there's more than one line in the buffer
var lines = data.split('\r\n'); // Split the lines
var i = lines.length;
while (i--) { // This will read your lines in reverse, be careful
console.log(lines[i]); // Print each line
}
} else {
console.log(data); // If only one line came through, print it
}
});
}).listen(1337);
And your client to
var net = require('net');
var client = net.connect({port: 1337}, function () {
var i = 0;
function send() {
client.write('a\r\n'); // Notice the \r\n part. This is what will help you separate messages on the server
if (++i < 100) {
process.nextTick(send);
} else {
client.end();
}
}
send();
});
And then I believe you would get 100 lines of a.
This module also provides a very interesting way to do it, and of course ZeroMQ would also shine in this because it already has a nice protocol that puts things in envelopes and sends them.
Also, interestingly but out of the scope of your question, the messages you write to the socket on one side will not arrive in the same order on the server. If you change your send function to
function send() {
if (++i < 100) {
client.write('a'+i+'\r\n');
process.nextTick(send);
} else {
client.end();
}
}
you can see them arriving not in the order you sent them.
By "The TCP protocol only sends exactly the bytes you write in the socket" I mean that if you do socket.write("1"); socket.write("2"), you will receive "12" on the server, because that's what you wrote on the socket. You have to explicitly separate your messages by something so that the server can know when a message starts and when a message ends.
About receiving things in order or not, you'll notice that if you remove the process.nextTick and have your client like:
var net = require('net');
var client = net.connect({port: 1337}, function () {
var i = 100;
while (i--) {
client.write('a'+i+'\r\n');
}
});
you'll get two messages on the server (at least I did): first numbers 83 - 99 and then 0 - 82, despite having written them in order.
That's because TCP splits the data into packets in some seemingly magic way. The first packet was actually larger than the second one, so it got there last. You can read more about how TCP works on the Wikipedia page, of course, and this video probably says more than you need to hear, but it's good to understand everything you're working with.
I have Node.js listening on a TCP port, receiving content from a Flash XMLSocket. If I try to push a lot of data in one message from Flash (XMLSocket.send(long_message)), the stream.on("data", function(d) {...}) event always fires several times, while I want it to fire once the entire message has been transferred.
Flash's XMLSocket transfers data as a UTF-8 encoded string terminated with a null byte.
How can I keep each message intact?
UPDATE
I've found a similar question here, but there is no clear answer. I know the end of my message should be a null byte, but could you please give me an example of how to store an incomplete message and avoid overlapping with the next/concurrent message?
UPDATE2
After maerics's answer I've done something like this:
var server = net.createServer(function(stream) {
var dataBlock = "";
stream.on("data", function(d) {
processChunk(d.toString("utf8")); // the chunk arrives as a Buffer, so convert it before splitting
});
function processChunk(data) {
var chunks = data.split("\0");
while (chunks.length > 1) {
if (dataBlock.length > 0) {
dataBlock += chunks.shift();
processIncompingMessage(dataBlock);
dataBlock = "";
}
else {
processIncompingMessage(chunks.shift());
}
}
dataBlock += chunks.shift();
}
});
Here's what I would do (tested):
var net = require('net');
var server = net.createServer(function (conn) {
var msg = ''; // Current message, per connection.
conn.setEncoding('utf8');
conn.on('message', function (m) {
console.log('MESSAGE: ' + m);
});
conn.on('data', function (data) {
msg += data.toString('utf8');
if (msg.charCodeAt(msg.length - 1) == 0) {
conn.emit('message', msg.substring(0, msg.length - 1));
msg = '';
}
});
});
Note that it is possible that multiple null separated messages could be encoded in a single data chunk, so you should expand this example to separate the data by null characters and process each one separately. Also, you might want to process the final, potentially incomplete message on the connection 'end' event.
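For illustration, an expanded version along those lines might look like this (a sketch of the same idea, not tested the way the snippet above was; the port is arbitrary):
var net = require('net');

var server = net.createServer(function (conn) {
    var msg = ''; // buffered text that has not yet been terminated by a null byte
    conn.setEncoding('utf8');

    conn.on('message', function (m) {
        console.log('MESSAGE: ' + m);
    });

    conn.on('data', function (data) {
        msg += data;
        // A chunk can contain several null-terminated messages; emit each complete one.
        var parts = msg.split('\0');
        msg = parts.pop(); // whatever follows the last null byte is still incomplete
        parts.forEach(function (part) {
            conn.emit('message', part);
        });
    });

    conn.on('end', function () {
        // Flush a trailing message that never received its null terminator, if any.
        if (msg.length > 0) conn.emit('message', msg);
    });
});

server.listen(8124); // arbitrary port for the sketch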