// Require the modules
var net = require("net");

// Store the users and the number of connections
var count = 0,
    users = {};

// Create the server
var server = net.createServer(function (conn) {
  // Store the current nickname and set the encoding to utf8
  var nickname;
  conn.setEncoding('utf8');

  // Show a welcome message when a connection is established
  conn.write(' > welcome to \033[92mnode-chat\033[39m!'
    + '\n > ' + count + ' other people are connected at this time.'
    + '\n > please write your name and press enter: ');

  // Increment the number of connections
  count++;

  // On incoming data: if no nickname is registered yet, either reject a name
  // that is already taken or register the new one and announce it to the room.
  // Once a nickname is registered, broadcast every message to the other users.
  conn.on('data', function (data) {
    data = data.replace('\r\n', '');
    if (!nickname) {
      if (users[data]) {
        conn.write('\033[93m > nickname already in use. Try again:\033[39m ');
        return;
      } else {
        nickname = data;
        users[nickname] = conn;
        for (var i in users) {
          users[i].write('\033[90m > ' + nickname + ' joined the room\033[39m\n');
        }
      }
    } else {
      for (var i in users) {
        if (i != nickname) {
          users[i].write('\033[96m > ' + nickname + ':\033[39m ' + data + '\n');
        }
      }
    }
  });

  // When the connection closes or ends, delete the nickname from storage,
  // decrement the number of connections and show a message
  conn.on('close', function () {
    count--;
    delete users[nickname];
    conn.write('\033[90m > ' + nickname + ' left the room\033[39m\n');
  });
});

// Listen on port 3000
server.listen(3000, function () {
  console.log('\033[96m server listening on *:3000\033[39m');
});
I have a bug in my chat. I establish two telnet connections from the shell, but when I close one, the other one closes too and shows an error message. What is wrong with my code?
You're trying to write to a closed connection, which will fail since writing to a closed Writable stream throws an error.
conn.on('close', function () {
  count--;
  delete users[nickname];
  conn.write('\033[90m > ' + nickname + ' left the room\033[39m\n');
});
What you want is to broadcast the message to all other users either using Object.keys or a for-in loop.
conn.on('close', function () {
  count--;
  delete users[nickname];
  Object.keys(users).forEach(function (user) {
    users[user].write(nickname + ' left the room\n');
  });
});
You also don't need a separate count variable to track connected users.
Object.keys(users).length;
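For example, the welcome message could derive the number from the users object instead of the counter (a small sketch based on the code above):

conn.write(' > welcome to \033[92mnode-chat\033[39m!'
  + '\n > ' + Object.keys(users).length + ' other people are connected at this time.'
  + '\n > please write your name and press enter: ');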
But when I close one, the other one closes too and shows an error message
That is because you're not listening for the 'error' event: by default, an EventEmitter throws if an error occurs and no 'error' listener is attached.
conn.on('error', console.log);
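Putting both fixes together, the cleanup and error handling could look like this (a sketch assembled from the pieces above):

var server = net.createServer(function (conn) {
  // ... nickname registration and data handling as before ...

  // log errors instead of letting the EventEmitter throw
  conn.on('error', console.log);

  conn.on('close', function () {
    delete users[nickname];
    // write only to the connections that are still open
    Object.keys(users).forEach(function (user) {
      users[user].write(nickname + ' left the room\n');
    });
  });
});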
We read an XML file (using xml-stream) with about 500k elements and insert them into MongoDB like this:
xml.on(`endElement: product`, writeDataToDb.bind(this, "product"));
Insert in writeDataToDb(type, obj) looks like this:
collection.insertOne(obj, {w: 1, wtimeout: 15000}).catch((e) => { });
Now when the Mongo connection gets disconnected, the xml stream still reads and the console gets flooded with error messages (can't insert, disconnected, EPIPE broken, ...).
In the docs it says:
When you shut down the mongod process, the driver stops processing operations and keeps buffering them due to bufferMaxEntries being -1 by default meaning buffer all operations.
What does this buffer actually do?
We notice that when we insert data and shut down the mongo server, things get buffered; when we bring the mongo server back up, the native driver successfully reconnects and node resumes inserting data, but the documents that were buffered while mongo was offline do not get inserted again.
So I question this buffer and its use.
Goal:
We are looking for the best way to keep inserts in a buffer until mongo comes back (within 15000 ms, according to wtimeout) and then insert the buffered documents, or to make use of xml.pause() and xml.resume(), which we tried without success.
Basically we need a little help in how to handle disconnects without data loss or interrupts.
Inserting 500K elements with insertOne() is a very bad idea. You should instead use bulk operations, which allow you to insert many documents in a single request.
(Here, for example, batches of 10000, so the whole job can be done in 50 requests.)
To avoid buffering issue, you can manually handle it:
Disable buffering with bufferMaxEntries: 0
Set reconnect properties: reconnectTries: 30, reconnectInterval: 1000
Create a bulkOperation and feed it with 10000 items
Pause the xml reader. Try to insert the 10000 items. If it fails, retry every 3000ms until it succeeds
You may face some duplicate ID issues if the bulk operation is interrupted during execution, so ignore them (error code: 11000)
Here is a sample script:
var fs = require('fs')
var Xml = require('xml-stream')
var MongoClient = require('mongodb').MongoClient
var url = 'mongodb://localhost:27017/test'

MongoClient.connect(url, {
  reconnectTries: 30,
  reconnectInterval: 1000,
  bufferMaxEntries: 0
}, function (err, db) {
  if (err != null) {
    console.log('connect error: ' + err)
  } else {
    var collection = db.collection('product')
    var bulk = collection.initializeUnorderedBulkOp()
    var totalSize = 500001
    var size = 0

    var fileStream = fs.createReadStream('data.xml')
    var xml = new Xml(fileStream)
    xml.on('endElement: product', function (product) {
      bulk.insert(product)
      size++
      // if we have enough products, save them using a bulk insert
      if (size % 10000 == 0) {
        xml.pause()
        bulk.execute(function (err, result) {
          if (err == null) {
            bulk = collection.initializeUnorderedBulkOp()
            console.log('doc ' + (size - 10000) + ' : ' + size + ' saved on first try')
            xml.resume()
          } else {
            console.log('bulk insert failed: ' + err)
            var counter = 0
            var retryInsert = setInterval(function () {
              counter++
              bulk.execute(function (err, result) {
                if (err == null) {
                  clearInterval(retryInsert)
                  bulk = collection.initializeUnorderedBulkOp()
                  console.log('doc ' + (size - 10000) + ' : ' + size + ' saved after ' + counter + ' tries')
                  xml.resume()
                } else if (err.code === 11000) { // ignore duplicate ID error
                  clearInterval(retryInsert)
                  bulk = collection.initializeUnorderedBulkOp()
                  console.log('doc ' + (size - 10000) + ' : ' + size + ' saved after ' + counter + ' tries')
                  xml.resume()
                } else {
                  console.log('failed after first try: ' + counter, 'error: ' + err)
                }
              })
            }, 3000) // retry every 3000ms until success
          }
        })
      } else if (size === totalSize) {
        bulk.execute(function (err, result) {
          if (err == null) {
            db.close()
          } else {
            console.log('bulk insert failed: ' + err)
          }
        })
      }
    })
  }
})
sample log output:
doc 0 : 10000 saved on first try
doc 10000 : 20000 saved on first try
doc 20000 : 30000 saved on first try
[...]
bulk insert failed: MongoError: interrupted at shutdown // mongodb server shutdown
failed after first try: 1 error: MongoError: no connection available for operation and number of stored operation > 0
failed after first try: 2 error: MongoError: no connection available for operation and number of stored operation > 0
failed after first try: 3 error: MongoError: no connection available for operation and number of stored operation > 0
doc 130000 : 140000 saved after 4 tries
doc 140000 : 150000 saved on first try
[...]
I don't know specifically about the MongoDB driver and this buffer of entries; maybe it only keeps data in specific scenarios.
So I will answer this question with a more general approach that can work with any database.
To summarize, you have two problems:
You are not recovering from failed attempts
The XML stream sends data too fast
To handle the first issue, you need to implement a retry algorithm that will ensure that many attempts are made before giving up.
To handle the second issue, you need to implement back pressure on the xml stream. You can do that using the pause method, the resume method and an input buffer.
var Promise = require('bluebird');
var fs = require('fs');
var Xml = require('xml-stream');

var fileStream = fs.createReadStream('myFile.xml');
var xml = new Xml(fileStream);

// simple exponential retry algorithm based on promises
function exponentialRetry(task, initialDelay, maxDelay, maxRetry) {
  var delay = initialDelay;
  var retry = 0;
  var closure = function () {
    return task().catch(function (error) {
      retry++;
      if (retry > maxRetry) {
        throw error;
      }
      var promise = Promise.delay(delay).then(closure);
      delay = Math.min(delay * 2, maxDelay);
      return promise;
    });
  };
  return closure();
}

var maxPressure = 100;
var currentPressure = 0;
var suspended = false;
var stopped = false;
var buffer = [];

// handle back pressure by storing incoming tasks in the buffer
// pause the xml stream as soon as we have enough tasks to work on
// resume it when the buffer is empty
function writeXmlDataWithBackPressure(product) {
  // closure used to try to start a task
  var tryStartTask = function () {
    // if we have enough tasks running, pause the xml stream
    if (!stopped && !suspended && currentPressure >= maxPressure) {
      xml.pause();
      suspended = true;
      console.log("stream paused");
    }
    // if we have room to run tasks
    if (currentPressure < maxPressure) {
      // if we have a buffered task, start it
      // if not, resume the xml stream
      if (buffer.length > 0) {
        buffer.shift()();
      } else if (!stopped) {
        try {
          xml.resume();
          suspended = false;
          console.log("stream resumed");
        } catch (e) {
          // the only way to know if you've reached the end of the stream
          // xml.on('end') can be triggered BEFORE all handlers are called
          // probably a bug of xml-stream
          stopped = true;
          console.log("stream end");
        }
      }
    }
  };

  // push the task to the buffer
  buffer.push(function () {
    currentPressure++;
    // use exponential retry to ensure we will try this operation 100 times before giving up
    exponentialRetry(function () {
      return writeDataToDb(product);
    }, 100, 2000, 100).finally(function () {
      currentPressure--;
      // a task has just finished, let's try to run a new one
      tryStartTask();
    });
  });

  // we've just buffered a task, let's try to run it
  tryStartTask();
}

// write the product to the database here :)
function writeDataToDb(product) {
  // the following code is here to create random delays and random failures (just for testing)
  var timeToWrite = Math.random() * 100;
  var failure = Math.random() > 0.5;
  return Promise.delay(timeToWrite).then(function () {
    if (failure) {
      throw new Error();
    }
    return null;
  });
}

xml.on('endElement: product', writeXmlDataWithBackPressure);
xml.on('endElement: product', writeXmlDataWithBackPressure);
Play with it and add some console.log calls to understand how it behaves.
I hope this will help you to solve your issue :)
I'm trying to create a chat system. Here it is:
The top field takes the name of the user, while the bottom textarea takes the message. Once the user presses enter, the middle textarea (which is disabled) updates itself with the new record. This is done using node.js sockets. My problem is that if I open another instance of Google Chrome and type something, the textarea in the other instance does not update. I'm puzzled by this, since my code should cover this case:
Here is the server.js code that handles the insertion. After inserting, it emits a socket event with the last inserted row so that index.html can update itself.
io.sockets.on("connection",function(socket){
socket.on("send",function(data){
mongodb.connect("mongodb://127.0.0.1:27017/myDatabase",function(err,db){
if(err) throw err;
var to_be_inserted = {name: data.name,content: data.content};
db.collection("chat").insert(to_be_inserted,function(err,objects){
if(err) throw err;
var cursor = db.collection("chat").find().sort({_id: -1}).limit(1);
cursor.toArray(function(err,docs){
console.log("abc");
socket.emit("data_to_be_printed",docs);
});
});
})
})
})
As you can see, once the data is inserted, an event is emitted containing the last row of the db. index.html should handle this event by updating itself. Here is the code that handles it:
<script>
var socket = io.connect("127.0.0.1:1337");
socket.on("data_to_be_printed",function(cursor){
var completed = document.getElementById("chatArea").value;
for(var i=0; i < cursor.length; i++)
{
console.log(cursor[i].name + " wrote: " + cursor[i].content);
var name = cursor[i].name;
var content = cursor[i].content;
var name_to_go = name.replace("/\r?\n|\r/g","");
var content_to_go = content.replace("/\r?\n|\r/g","");
completed+="\n" + name_to_go + ": " + content_to_go;
}
document.getElementById("chatArea").value = completed;
});
function keyfunction(e)
{
if((e.keyCode == 13 || e.which == 13) && !e.shiftKey)
{
socketEmitDb();
}
}
function socketEmitDb()
{
var name = document.getElementById("name").value;
var content = document.getElementById("writtenThing").value;
console.log("Name: " + name + " |||| content: " + content);
document.getElementById("name").value="";
document.getElementById("writtenThing").value="";
if(name.length > 0 && content.length > 0)
{
socket.emit("send",{"name": name,"content": content});
}else
{
alert("Make sure that the name and the message is not empty");
}
}
</script>
Shouldn't the message be emitted to all open sockets?
In your server program, change socket.emit to io.sockets.emit.
socket.emit sends a message only to the specific socket (client) that connected; io.sockets.emit sends a message to all connected sockets (clients).
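Applied to the handler above, only the emit line needs to change:

cursor.toArray(function (err, docs) {
  console.log("abc");
  // broadcast the newest row to every connected client, not just the sender
  io.sockets.emit("data_to_be_printed", docs);
});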
Trying to send data from a serial device to web clients. I am using a serial-to-network proxy, ser2net, to make the data available to a server that acts on the data and sends a manipulated version of it to web clients. The clients specify the location of the ser2net host and port. The core of this action is coded in node.js as shown here:
function getDataStream(socket, dataSourcePort, host) {
  var dataStream = net.createConnection(dataSourcePort, host),
      line = "";

  dataStream.on('error', function (error) {
    socket.emit('error', {message: "Source not found on host:" + host + " port:" + dataSourcePort});
    console.log(error);
  });

  dataStream.on('connect', function () {
    socket.emit('connected', {message: "Data Source Found"});
  });

  dataStream.on('close', function () {
    console.log("Close socket");
  });

  dataStream.on('end', function () {
    console.log('socket ended');
    dataConnection.emit('lost', {connectInfo: {host: host, port: dataSourcePort}});
  });

  dataStream.on('data', function (data) {
    // Collect a line from the host
    line += data.toString();
    // Split collected data by delimiter
    line.split(delimiter).forEach(function (part, i, array) {
      if (i !== array.length - 1) { // Fully delimited line.
        // push on to buffer and emit when bufferSendCommand is present
        buffer.push(part.trim());
        if (part.substring(0, bufferSendCommand.length) == bufferSendCommand) {
          gotALine.emit('new', buffer);
          buffer = [];
        }
      } else {
        // Last split part might be partial. We can't announce it just yet.
        line = part;
      }
    });
  });

  return dataStream;
}
io.sockets.on('connection', function (socket) {
  var stream = getDataStream(socket, dataSourcePort, host);

  // dispense incoming data from the data server
  gotALine.on('new', function (buffer) {
    socket.emit('feed', {feedLines: buffer});
  });

  dataConnection.on('lost', function (connectInfo) {
    setTimeout(function () {
      console.log("Trying --- to reconnect");
      stream = getDataStream(socket, connectInfo.port, connectInfo.host);
    }, 5000);
  });

  // Handle client request to change stream
  socket.on('message', function (data) {
    var clientMessage = JSON.parse(data);
    if ('connectString' in clientMessage
        && clientMessage.connectString.dataHost !== ''
        && clientMessage.connectString.dataPort !== '') {
      stream.destroy();
      stream = getDataStream(socket,
        clientMessage.connectString.dataPort,
        clientMessage.connectString.dataHost);
    }
  });
});
This works well enough until the serial device drops off and ser2net stops sending data. My attempt to catch the end of the socket and reconnect is not working: the event gets emitted properly, but the setTimeout only fires once. I would like to find a way to keep trying to reconnect while sending the client messages about the retry attempts. I am a node.js newbie and this may not be the best way to do this. Any suggestions would be appreciated.
OK, I think I figured it out. In the dataStream.on('data', ...) handler I added a timeout:
clearTimeout(connectionMonitor);
connectionMonitor = setTimeout(function(){doReconnect(socket);}, someThresholdTime);
The timeout only executes if data stops coming in, since it is cleared again each time data arrives. The doReconnect function keeps trying to connect and sends a message to the client saying something is wrong.
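A sketch of how that watchdog could sit inside getDataStream from the question; someThresholdTime, the event name and the body of doReconnect are illustrative, not from the original code:

var connectionMonitor;
dataStream.on('data', function (data) {
  // every chunk postpones the reconnect; it only runs if the
  // source stays silent longer than the threshold
  clearTimeout(connectionMonitor);
  connectionMonitor = setTimeout(function () { doReconnect(socket); }, someThresholdTime);
  // ... existing line-splitting logic ...
});

function doReconnect(socket) {
  // tell the client we lost the feed, then try again
  socket.emit('error', {message: "Data source silent, reconnecting..."});
  getDataStream(socket, dataSourcePort, host);
}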
I am trying to build a simple tcp chat server, WITHOUT socket.io.
Now, I have no problem broadcasting data across all sockets connected to the server.
My problem is assigning a socket identifier to each connection and retrieving them from an object.
Here is the code:
var net = require('net');

// keep track of sockets
var allSockets = {
  sockets: {},
  addSocket: function (socket, nick, table) {
    this.sockets[table + nick] = socket;
  },
  removeSocket: function (nick, table) {
    if (this.sockets[table + nick] !== undefined) {
      this.sockets[table + nick] = null;
      delete this.sockets[table + nick];
    }
  }
};

// create the server
var server = net.createServer(function (socket) {
  var connected = false;
  var jsoncommand = true;
  // the first data sent MUST BE a JSON formatted string in this format:
  // {"nick":"someid","table":"tablenumber"}
  var thisnick = "";
  var thistable = "";

  // get client ip
  socket.name = socket.remoteAddress;
  // write something on each connect
  socket.write("You are connecting from " + socket.name + "\n");
  socket.write(socket.name + " joined chat\n");

  // handle data streams
  socket.on('data', function (data) {
    if (jsoncommand) {
      // JSON.parse the first data stream
      var some = JSON.parse(data);
      // assign a socket id based on nick and table
      allSockets.addSocket(socket, some.nick, some.table);
      socket.write(some.nick + " joined " + some.table + "\n");
      thisnick = some.nick;
      thistable = some.table;
      connected = true;
      // no longer waiting for the first stream as JSON
      jsoncommand = false;
    } else if (connected) {
      // write whatever data it receives (function is below)
      broadcast(data, thistable);
    } else {
      socket.write("You are not connected to any table");
      socket.destroy();
      connected = false;
      jsoncommand = true;
      thisnick = "";
      thistable = "";
    }
  });

  // remove the socket from allSockets but broadcast
  // only to other users on the same table
  socket.on('end', function () {
    allSockets.removeSocket(thisnick, thistable);
    broadcast(thisnick + " has left table " + thistable, thistable);
  });

  // this function should select from the allSockets object
  // only those whose property matches "table"
  // and write to those sockets only, when called
  function broadcast(message, table) {
    allSockets.sockets.forEach(function (socket) {
      if (socket.hasOwnProperty(table)) {
        socket.write(message);
      }
    });
  }
});

server.listen(8000);
console.log("running at port 8000\n");
Just deploy this on your machine and connect with nc to port 8000, and make sure that the first thing you send it is something like
{"nick":"mynick","table":"mytable"}
You will see a message that your nick joined your table.
Now, if you send it something else, it should echo whatever you send (based on the table name it stored) back to you and to other connections with different nicks on the same table. Instead, the server dies, throwing an error that the allSockets object does not have a "for" or "forEach" method, or indexOf, or any other.
SO, how do I correct this?
If my nick is "john" and I joined "my_table", and "mary", "lisa" and "ana" also joined "my_table", and assuming I don't know their nicks but I do know they are on "my_table",
HOW do I select from the allSockets object those sockets whose key contains "my_table"?
I tried hasOwnProperty, but that returns a boolean, which only tells me that a socket with that property exists; how do I put the matching sockets in a for or forEach loop in order to write to them?
I know it may be a silly question, or maybe I'm not even approaching this correctly, but I'm a node.js beginner, so any advice is greatly appreciated.
By the way, I put this together from examples across the web.
As for the JSON string, it's the first thing sent by a desktop app on connection. Anyway, I chose it for testing purposes, so don't bother with it.
I suppose the error about forEach happens here:
allSockets.sockets.forEach(function(socket) {
allSockets.sockets is not an Array; it is an object (a key/value map, like a hash table).
So to loop through each socket in it, you should change the loop to:
for (var key in allSockets.sockets) {
  var socket = allSockets.sockets[key];
  // your logic here
}
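The same pattern answers the broadcast question too: since the keys are built as table + nick, you can match on the table prefix and write only to those sockets. A sketch, assuming the key format used by addSocket above:

function broadcast(message, table) {
  for (var key in allSockets.sockets) {
    // keys look like "my_tablejohn", so match the table prefix
    if (key.indexOf(table) === 0) {
      allSockets.sockets[key].write(message);
    }
  }
}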
I have node.js listening on a tcp port and getting content from a Flash XMLSocket. If I try to push a lot of data in one message from flash (XMLSocket.send(long_message)), the stream.on("data", ...) event always fires several times, while I want it to fire once the entire message has been transferred.
Flash's XMLSocket transfers data as a UTF8-encoded string terminated with a null byte.
How can I ensure message consistency?
UPDATE
I've found a similar question here, but there is no clear answer. I know the end of my message should be a null byte, but could you please give me an example of how to store an incomplete message and avoid overlapping with the next/concurrent message?
UPDATE2
After maerics's answer, I've done something like this:
var server = net.createServer(function (stream) {
  var dataBlock = "";
  stream.on("data", function (d) {
    processChunk(d);
  });

  function processChunk(data) {
    var chunks = data.split("\0");
    while (chunks.length > 1) {
      if (dataBlock.length > 0) {
        dataBlock += chunks.shift();
        processIncomingMessage(dataBlock);
        dataBlock = "";
      } else {
        processIncomingMessage(chunks.shift());
      }
    }
    dataBlock += chunks.shift();
  }
});
Here's what I would do (tested):
var net = require('net');

var server = net.createServer(function (conn) {
  var msg = ''; // Current message, per connection.
  conn.setEncoding('utf8');
  conn.on('message', function (m) {
    console.log('MESSAGE: ' + m);
  });
  conn.on('data', function (data) {
    msg += data.toString('utf8');
    if (msg.charCodeAt(msg.length - 1) == 0) {
      conn.emit('message', msg.substring(0, msg.length - 1));
      msg = '';
    }
  });
});
Note that it is possible that multiple null separated messages could be encoded in a single data chunk, so you should expand this example to separate the data by null characters and process each one separately. Also, you might want to process the final, potentially incomplete message on the connection 'end' event.
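A sketch of that extension, splitting each chunk on the null byte and keeping any trailing partial message for the next 'data' event:

conn.on('data', function (data) {
  msg += data;
  var parts = msg.split('\0');
  msg = parts.pop(); // the last piece may be incomplete; keep it for later
  parts.forEach(function (m) {
    conn.emit('message', m);
  });
});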