I can't seem to override net.Socket.write. The example below is not my real use case, but rather a scaled-down, runnable example to demonstrate the issue.
const net = require("net");
class UTF8Socket extends net.Socket {
constructor () {
super();
this.setEncoding("utf8");
}
write(data, cb) {
console.log("Sending...");
super.write(data, "utf8", cb);
}
end(data) {
console.log("Ending socket...");
super.end(data);
}
}
// Setup server
const server = net.createServer(socket => {
socket.setEncoding("utf8");
socket.on("data", (res) => console.log("Server received data:", res));
});
server.listen(8080, '127.0.0.1');
// Create a UTF8Socket and write to server
const socket = new UTF8Socket();
socket.connect(8080, "127.0.0.1", () => {
socket.write("test write\n");
socket.end("test end");
});
Expected output:
Sending...
Ending socket...
Server received data: test write
test end
Actual output:
Ending socket...
Server received data: test write
test end
The overridden end function is called as you can see from the output, but only the original net.Socket.write is ever called. I feel like I'm missing something...
I guess it's because of this code in the implementation of net.Socket.connect():
if (this.write !== Socket.prototype.write)
this.write = Socket.prototype.write;
Possible workaround:
connect() {
let result = super.connect.apply(this, arguments);
this.write = UTF8Socket.prototype.write.bind(this);
return result;
}
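Another way around it (just a sketch of an alternative, not something from the original post; writeUTF8 is a name I made up) is to sidestep the clobbering entirely by giving the subclass its own method, which nothing inside net.Socket.connect() will reassign:
class UTF8Socket extends net.Socket {
    constructor() {
        super();
        this.setEncoding("utf8");
    }
    // A differently named wrapper can't be replaced by the
    // `this.write = Socket.prototype.write` assignment inside connect().
    writeUTF8(data, cb) {
        console.log("Sending...");
        return super.write(data, "utf8", cb);
    }
}
Callers then use socket.writeUTF8("test write\n") instead of socket.write(...), which also makes it obvious at the call site that the encoding is being forced.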
I am making use of "socket.io-client" and "socket.io-stream" to make a request and then stream some data. I have the following code that handles this logic.
Client Server Logic
router.get('/writeData', function(req, res) {
var io = req.app.get('socketio');
var nameNodeSocket = io.connect(NAMENODE_ADDRESS, { reconnect: true });
var nameNodeData = {};
async.waterfall([
checkForDataNodes,
readFileFromS3
], function(err, result) {
if (err !== null) {
res.json(err);
}else{
res.json("Finished Writing to DN's");
}
});
function checkForDataNodes(cb) {
nameNodeSocket.on('nameNodeData', function(data) {
nameNodeData = data;
console.log(nameNodeData);
cb(null, nameNodeData);
});
if (nameNodeData.numDataNodes === 0) {
cb("No datanodes found");
}
}
function readFileFromS3(nameNodeData, cb) {
for (var i in nameNodeData['blockToDataNodes']) {
var IP = nameNodeData['blockToDataNodes'][i]['ipValue'];
var dataNodeSocket = io.connect('http://'+ IP +":5000");
var ss = require("socket.io-stream");
var stream = ss.createStream();
var byteStartRange = nameNodeData['blockToDataNodes'][i]['byteStart'];
var byteStopRange = nameNodeData['blockToDataNodes'][i]['byteStop'];
paramsWithRange['Range'] = "bytes=" + byteStartRange.toString() + "-" + byteStopRange.toString();
//var file = require('fs').createWriteStream('testFile' + i + '.txt');
var getFileName = nameNodeData['blockToDataNodes'][i]['key'].split('/');
var fileData = {
'mainFile': paramsWithRange['Key'].split('/')[1],
'blockName': getFileName[1]
};
ss(dataNodeSocket).emit('sendData', stream, fileData);
s3.getObject(paramsWithRange).createReadStream().pipe(stream);
//dataNodeSocket.disconnect();
}
cb(null);
}
});
Server Logic (that gets the data)
var dataNodeIO = require('socket.io')(server);
var ss = require("socket.io-stream");
dataNodeIO.on('connection', function(socket) {
console.log("Succesfully connected!");
ss(socket).on('sendData', function(stream, data) {
var IP = data['ipValue'];
var blockName = data['blockName'];
var mainFile = data['mainFile'];
dataNode.makeDir(mainFile);
dataNode.addToReport(mainFile, blockName);
stream.pipe(fs.createWriteStream(mainFile + '/' + blockName));
});
});
How can I properly disconnect the connections in the function readFileFromS3? I have noticed that calling dataNodeSocket.disconnect() at the end does not work, as I cannot verify the data was received on the second server. But if I comment it out, I can see the data being streamed to the second server.
My objective is to close the connections on the client server side.
It appears that the main problem with closing the socket is that you weren't waiting for the stream to be done writing before trying to close the socket. So, because the writing is all asynchronous and finishes sometime later, you were trying to close the socket before the data had been written.
Also, because you were putting asynchronous operations inside a for loop, you were running all your operations in parallel, which may not be exactly what you want: it makes error handling more difficult and the load on the servers harder to control.
Here's the code I would suggest that does the following:
Create a function streamFileFromS3() that streams a single file and returns a promise that will notify when it's done.
Use await in a for loop with that streamFileFromS3() to serialize the operations. You don't have to serialize them, but then you would have to change your error handling to figure out what to do if one errors while the others are already running and you'd have to be more careful about concurrency issues.
Use try/catch to catch any errors from streamFileFromS3().
Add error handling on the stream.
Change all occurrences of data['propertyName'] to data.propertyName. The only time you need to use brackets is if the property name contains a character that is not allowed in a Javascript identifier or if the property name is in a variable. Otherwise, the dot notation is preferred (see the short example after this list).
Add socket.io connection error handling logic for both socket.io connections.
Set the returned status to 500 when there's an error processing the request.
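For instance (blockName is one of the properties used above; someKey is just a made-up variable for illustration):
data['blockName']   // works, but the brackets add nothing here
data.blockName      // preferred
data[someKey]       // brackets required: the key is held in a variable
data['block-name']  // brackets required: '-' is not legal in an identifier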
So, here's the code for that:
const ss = require("socket.io-stream");
router.get('/writeData', function(req, res) {
const io = req.app.get('socketio');
function streamFileFromS3(ip, data) {
return new Promise((resolve, reject) => {
const dataNodeSocket = io.connect(`http://${ip}:5000`);
dataNodeSocket.on('connect_error', reject);
dataNodeSocket.on('connect_timeout', () => {
reject(new Error(`timeout connecting to http://${ip}:5000`));
});
dataNodeSocket.on('connect', () => {
// dataNodeSocket connected now
const stream = ss.createStream().on('error', reject);
paramsWithRange.Range = `bytes=${data.byteStart}-${data.byteStop}`;
const filename = data.key.split('/')[1];
const fileData = {
'mainFile': paramsWithRange.Key.split('/')[1],
'blockName': filename
};
ss(dataNodeSocket).emit('sendData', stream, fileData);
// get S3 data and pipe it to the socket.io stream
s3.getObject(paramsWithRange).createReadStream().on('error', reject).pipe(stream);
stream.on('close', () => {
dataNodeSocket.disconnect();
resolve();
});
});
});
}
function connectError(msg) {
res.status(500).send(`Error connecting to ${NAMENODE_ADDRESS}`);
}
const nameNodeSocket = io.connect(NAMENODE_ADDRESS, { reconnect: true });
nameNodeSocket.on('connect_error', connectError).on('connect_timeout', connectError);
nameNodeSocket.on('nameNodeData', async (nameNodeData) => {
try {
for (let item of nameNodeData.blockToDataNodes) {
await streamFileFromS3(item.ipValue, item);
}
res.json("Finished Writing to DN's");
} catch(e) {
res.status(500).json(e);
}
});
});
Other notes:
I don't know what paramsWithRange is, as it is not declared here. When you were doing everything in parallel, it was being shared among all the connections, which is asking for trouble. In my serialized implementation it's probably safe to share, but the way it is now bothers me as it's a concurrency issue waiting to happen.
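If paramsWithRange does need to be shared, one way to defuse that (just a sketch, guessing at its shape from the code above) is to build a fresh params object per file inside streamFileFromS3() instead of mutating the shared one:
// inside streamFileFromS3(), instead of assigning to paramsWithRange.Range:
const params = Object.assign({}, paramsWithRange, {
    Range: `bytes=${data.byteStart}-${data.byteStop}`
});
s3.getObject(params).createReadStream().on('error', reject).pipe(stream);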
I'm totally new to the whole Node.js asynchronous-y, callback-y programming, so I need guidance in understanding what I'm even doing more than anything. With that said, I have two files: main.js and server.js.
My main file looks like this:
var server=require('./server.js');
server();
function WhenUserClicksButton(){
server();
}
and my server file looks like this:
var net = require('net');
function server(){
net.createServer(function (socket) {
socket.write('\x16'); //SYN character
socket.on('data', function (data) {
//handle data from client
});
}).listen(33333);
}
The first call of server(); starts the TCP server. Then the function WhenUserClicksButton is called when the user clicks a button (duhh) in a GUI. But it attempts to start the server again, so I get
Error: listen EADDRINUSE :::33333
I get why this is happening, but I can't think of a solution for it. What I really need is:
Start the server and listen on 33333
When nothing is happening, the server and client just exchange SYN and ACK characters every few seconds (I already have this part done; I just removed it from this example for clarity because it's not really the topic of this question)
When the user clicks the button, change socket.write('\x16'); to socket.write('something');
Then wait for the server and client to exchange data and, after everything is done, return the results back to main.js
As I said, I'm new to this and I believe my problem lies in not understanding fully of what I'm doing. Any help and explanations are welcome!
I think you're very near where you need to be. I would do something like this:
server.js
var net = require('net');
var netServer = null;
var netSocket = null;
function sendData(data) {
if (netServer && netSocket) {
console.log('Send data: sending: ', data);
netSocket.write(data);
}
}
function startServer(){
netServer = net.createServer(function (socket) {
netSocket = socket;
socket.write('\x16'); //SYN character
socket.on('data', function (data) {
console.log('Server: data from client: ', data);
if (data.length === 1 && data[0] === 0x16) {
// log and ignore SYN chars..
console.log('SYN received from client');
} else if (newDataCallback) {
newDataCallback(data);
};
});
});
console.log('Server listening on 33333..');
netServer.listen(33333);
}
var newDataCallback = null;
function setNewDataCallback(callback) {
newDataCallback = callback;
}
module.exports = {
sendData: sendData,
startServer: startServer,
setNewDataCallback: setNewDataCallback
};
main.js
var server = require('./server');
function newDataCallback(data) {
console.log('newDataCallback: New data from server: ', data);
}
server.setNewDataCallback(newDataCallback);
server.startServer();
function wheneverUserClicksButton() {
server.sendData('something');
}
testClient.js
var net = require('net');
var clientSocket = net.createConnection(33333, "127.0.0.1");
clientSocket.on('data', (someData) => {
console.log('Data received', someData);
});
clientSocket.on('connect', () => {
console.log('Client Socket connected ');
clientSocket.write('Hello from client');
});
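To exercise the round trip without a GUI (purely my own test harness on top of the code above, not part of the original setup), you could trigger the send from a timer in main.js and watch the client's 'data' handler fire:
// in main.js, after server.startServer():
setTimeout(function() {
    server.sendData('something');  // shows up in testClient.js's 'data' handler
}, 2000);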
I have a server
var connect = require('connect');
var serveStatic = require('serve-static');
var HTMLServer = function(path){
this.path = path;
this.server = connect().use(serveStatic(this.path));
this.startServer = function(callback){
this.server = this.server.listen(8080, callback);
};
this.stopServer = function(callback){
this.server.close(callback);
}
}
And I use it as follows:
var thisServer = new HTMLServer(__dirname);
thisServer.startServer(function(){
console.log('Server running on 8080...');
setTimeout(function(){
thisServer.stopServer(function(){
console.log('Server closed');
});
}, 3000);
});
As expected, server starts and after 3000 milliseconds it stops.
But if, within these 3000 milliseconds, I make a request to this server, stopServer is still called; however, the server does not close.
I'm sure the line this.server.close(callback); gets executed, but it doesn't close the server as I expect.
How can I fix that?
Is a request to the server changing the server instance in a way that needs special handling?
Later edit:
I would like to add some precision now that I've left the code running. It seems the server does get closed, just not instantly: it happens after an amount of time that I don't understand, though no longer than 5 minutes.
So the close operation seems to be delayed. Can I make it instant somehow?
While @jfriend00 was correct that node.js keeps running until all existing sockets are finished, the process.exit solution was a bit too radical for my use case and I needed a cleaner solution to close the server gracefully.
Looking into getConnections only added more confusion, since it didn't behave as expected (for example, it returned 2 connections even if I didn't make any request).
I also looked into server.listening, but it returned false even though the server still accepted more requests. Perhaps it accepts connections from a client that made requests before the server was closed.
Anyway, the solution for me was to use the http-shutdown lib, which essentially adds the following .shutdown method to your server object.
function addShutdown(server) {
var connections = {};
var isShuttingDown = false;
var connectionCounter = 0;
function destroy(socket, force) {
if (force || (socket._isIdle && isShuttingDown)) {
socket.destroy();
delete connections[socket._connectionId];
}
};
function onConnection(socket) {
var id = connectionCounter++;
socket._isIdle = true;
socket._connectionId = id;
connections[id] = socket;
socket.on('close', function() {
delete connections[id];
});
};
server.on('request', function(req, res) {
req.socket._isIdle = false;
res.on('finish', function() {
req.socket._isIdle = true;
destroy(req.socket);
});
});
server.on('connection', onConnection);
server.on('secureConnection', onConnection);
function shutdown(force, cb) {
isShuttingDown = true;
server.close(function(err) {
if (cb) {
process.nextTick(function() { cb(err) });
}
});
Object.keys(connections).forEach(function(key) {
destroy(connections[key], force);
});
};
server.shutdown = function(cb) {
shutdown(false, cb);
};
server.forceShutdown = function(cb) {
shutdown(true, cb);
};
return server;
};
With this function, I can update my server as follows, and now stopServer works as expected:
var HTMLServer = function(path){
this.path = path;
this.server = connect().use(serveStatic(this.path));
this.startServer = function(callback){
this.server = addShutdown(this.server.listen(8080, callback));
};
this.stopServer = function(callback){
console.log("I was called");
this.server.shutdown(callback);
}
}
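As a side note, if you're on a recent Node (18.2+, if I remember correctly), http.Server has closeAllConnections() built in, so a rough sketch without the extra lib could look like this (untested on my side):
// rough alternative, assuming a Node version where http.Server#closeAllConnections exists
this.stopServer = function(callback){
    this.server.close(callback);        // stop accepting new connections
    this.server.closeAllConnections();  // destroy lingering keep-alive sockets right away
};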
The socket's 'end' event is firing twice. I'm trying to figure out why.
On one client, I have:
import Net from 'net';
import Chalk from 'chalk';
const fallback = [2,5,10,25,50,100,250,500,1000,2000];
class LocalNetworkInterface {
constructor({path}) {
this._sock = new Net.Socket();
this._pending = {};
this._count = 0;
this._retry = 0;
const connect = () => {
this._sock.connect({path});
};
this._sock.on('connect',() => {
this._retry = 0;
console.log(`Connected to ${Chalk.underline(path)}`);
});
this._sock.on('data',buffer => {
let data = JSON.parse(buffer);
this._pending[data.queryId].resolve(data);
delete this._pending[data.queryId];
});
this._sock.on('end', () => {
console.log(`Lost connection to ${Chalk.underline(path)}. Attempting to reconnect...`);
connect();
});
this._sock.on('error', err => {
if(err.code === 'ENOENT') {
let ms = fallback[this._retry];
if(this._retry < fallback.length - 1) ++this._retry;
console.log(`Socket server unavailable. Trying again in ${ms}ms`);
setTimeout(connect, ms);
}
});
connect();
}
// ...
}
And the server:
const sockServer = Net.createServer(c => {
c.on('data', buffer => {
let data = JSON.parse(buffer);
// log('Received',data);
let ql = queryLogger();
runQuery(Object.assign({}, data, {schema})).then(result => {
ql(`${Chalk.magenta('socket')} ${print(data.query).trim()}`);
let response = Object.assign({}, result, {queryId: data.queryId});
c.write(JSON.stringify(response));
});
})
});
sockServer.on('error', serverError => {
if(serverError.code === 'EADDRINUSE') {
let clientSocket = new Net.Socket();
clientSocket.on('error', clientError => {
if(clientError.code === 'ECONNREFUSED') {
FileSystem.unlink(SOCK_FILE, unlinkErr => {
if(unlinkErr) throw unlinkErr;
sockServer.listen(SOCK_FILE, () => {
log(`Sock server improperly shut down. Listening on '${sockServer.address()}'`)
});
});
}
});
clientSocket.connect({path: SOCK_FILE}, () => {
throw new Error(`Server already running`);
});
}
});
['SIGTERM','SIGINT'].forEach(signal => process.on(signal, () => {
console.log(`\rReceived ${Chalk.yellow(signal)}, shutting down ${Chalk.red('❤')}`);
sockServer.close();
process.exit();
}));
sockServer.listen(SOCK_FILE, () => {
log(`Listening on ${Chalk.underline(sockServer.address())}`)
});
When I restart the server, I see "Lost connection" twice on the client. Why?
The documentation says:
Emitted when the other end of the socket sends a FIN packet.
The server isn't sending two FIN packets, is it? Is there any way I can verify?
Seeing this in the docs in regard to connect...
"...This function is asynchronous. When the 'connect' event is emitted the socket is established. If there is a problem connecting, the 'connect' event will not be emitted, the 'error' event will be emitted with the exception."
Could it be that the connect event is simply not firing, making it look to you like the end event fired twice? Like @robertklep said, maybe expand that error check to cover more than that specific code.
I think it's because, on 'end', I immediately try to reconnect and the same event is then being caught again. It seems kind of strange that it would do that, but delaying the reconnect to the next tick works:
this._sock.on('end', () => {
console.log(`${Chalk.yellow('Lost connection')} to ${Chalk.underline(path)}. Attempting to reconnect...`);
process.nextTick(connect);
});
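Another variation I'd consider (my own sketch, not part of the fix above, and untested against this particular bug) is to schedule reconnects from the 'close' event instead, since a net.Socket emits 'close' exactly once per teardown and passes a hadError flag:
this._sock.on('close', hadError => {
    if (hadError) return;  // the ENOENT handler above already schedules its own retry
    console.log(`${Chalk.yellow('Lost connection')} to ${Chalk.underline(path)}. Attempting to reconnect...`);
    connect();
});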
I'm using sockjs with standard configuration.
var ws = sockjs.createServer();
ws.on('connection', function(conn) {
conn.on('data', function(message) {
wsParser.parse(conn, message)
});
conn.on('close', function() {
});
});
var server = http.createServer(app);
ws.installHandlers(server, {prefix:'/ws'});
server.listen(config.server.port, config.server.host);
The wsParser.parse function works like this:
function(conn, message) {
(...)
switch(message.action) {
case "titleAutocomplete":
titleAutocomplete(conn, message.data);
break;
(...) // a lot more of these
    }
}
Each method called in the switch sends a message back to the client.
var titleAutocomplete = function(conn, data) {
redis.hgetall("titles:"+data.query, function(err, titles){
if(err) ERR(err);
if(titles) {
var response = JSON.stringify({"action": "titleAutocomplete", "data": {"titles": titles}});
conn.write(response);
}
})
};
Now my problem is that I'd like to write tests for my code (better late than never, I guess) and I have no idea how to do it. I started writing normal HTTP tests with mocha + supertest, but I just don't know how to handle websockets.
I'd like to have only one websocket connection to reuse through all the tests; I'm binding the websocket connection to the user session after the first message, and I want to test that persistence as well.
How do I make use of the ws client's onmessage event in my tests? How can the tests tell received messages apart and know which one they are supposed to wait for?
A colleague at work asked if it really needed to be a client connection, or whether it would be possible to just mock it up. That turned out to be the way to go. I wrote a little helper module, wsMock.js:
var wsParser = require("../wsParser.js");
exports.createConnectionMock = function(id) {
return {
id: id,
cb: null,
write: function(message) {
this.cb(message);
},
send: function(action, data, cb) {
this.cb = cb;
var obj = {
action: action,
data: data
}
var message = JSON.stringify(obj);
wsParser.parse(this, message);
},
sendRaw: function(message, cb) {
this.cb = cb;
wsParser.parse(this, message);
}
}
}
Now in my mocha test I just do
var wsMock = require("./wsMock.js");
var ws = wsMock.createConnectionMock("12345-67890-abcde-fghi-jklmn-opqrs-tuvwxyz");
(...)
describe('Websocket server', function () {
it('should set sessionId variable after handshake', function (done) {
ws.send('handshake', {token: data.token}, function (res) {
var msg = JSON.parse(res);
msg.action.should.equal('handshake');
msg.data.should.be.empty;
ws.should.have.property('sessionId');
ws.should.not.have.property('session');
done();
})
})
it('should not return error when making request after handshake', function (done) {
ws.send('titleAutocomplete', {query: "ter"}, function (res) {
var msg = JSON.parse(res);
msg.action.should.equal('titleAutocomplete');
msg.data.should.be.an.Object;
ws.should.have.property('session');
done();
})
})
})
It works like a charm and persists connection state and variables between requests.