Node.js read TCPSocket and write to a variable - node.js

I've got a little problem with my code snippet. I wrote an example to learn TCP socket communication for a project. At the moment I can send a TCP message and read the answer, but I want to use the answer in other parts of the software. For that I tried to use the variable socketmessage, but this doesn't work. Does anyone have an answer for my problem? Thanks a lot.
var net = require('net');
var client = new net.Socket();
var HOST = '127.0.0.1';
var PORT = '20000';
var MSG = "{\"REQUEST\":\"STATUS\"}";
var socketmessage;
socketmessage = getSocketMessage(MSG);
console.log("Socket Message: " + socketmessage);

function getSocketMessage(tcpmsg) {
    var outData;
    client.connect(PORT, HOST, function() {
        console.log("Client: " + tcpmsg);
        client.write(tcpmsg);
    });
    client.setTimeout(5000, function() { client.destroy(); });
    client.on('data', function(data) {
        console.log('Server: ' + data);
        outData = data.toString('utf8');
        console.log("Socketmessage: " + outData);
        client.destroy();
    });
    // Add a 'close' event handler for the client socket
    client.on('close', function() {
        console.log('Connection closed');
    });
    // Add an 'error' event handler for the client socket
    client.on('error', function(error) {
        console.log('Error Connection: ' + error);
    });
    return outData;
}
Terminal:
Socket Message: undefined
Client: {"REQUEST":"STATUS"}
Server: {"STATUS":0.000000}

This is because the function getSocketMessage is asynchronous. You are trying to return the received message, but the function returns immediately, so outData is still undefined at that point. Its value is only set later, when data arrives from the server. The network I/O is evented; the event you use is data:
client.on('data', function(data) {
The received message can only be handled properly inside the event handler for data. You would have to call your other code from there.
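If other parts of the program need the response, one common pattern is to hand them a callback (or wrap the request in a Promise) instead of returning a value. Below is a minimal sketch of the callback approach, reusing HOST, PORT and MSG from the question; the callback parameter, the per-call socket and the timeout error handling are my own additions, not part of the original code.
var net = require('net');

var HOST = '127.0.0.1';
var PORT = 20000;
var MSG = "{\"REQUEST\":\"STATUS\"}";

// Calls back with (err, answer) once the server has replied.
function getSocketMessage(tcpmsg, callback) {
    var client = new net.Socket();
    client.setTimeout(5000, function() {
        client.destroy();
        callback(new Error('timeout waiting for answer'));
    });
    client.on('error', function(err) {
        callback(err);
    });
    client.on('data', function(data) {
        client.destroy();
        callback(null, data.toString('utf8'));
    });
    client.connect(PORT, HOST, function() {
        client.write(tcpmsg);
    });
}

// The "other software parts" run inside the callback, not after the call returns.
getSocketMessage(MSG, function(err, socketmessage) {
    if (err) {
        console.log('Error Connection: ' + err);
        return;
    }
    console.log('Socket Message: ' + socketmessage);
});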

You can create a read stream and, once the stream ends, process the collected data. A socket is a readable stream too, so the same 'data'/'end' pattern applies. For example, with a file stream:
var fs = require('fs');
var readStream = fs.createReadStream('/path/to/file', 'utf8'); // you can use path.join(loc, filename)
var data = '';
readStream.on('data', function(chunk) {
    data += chunk;
}).on('end', function() {
    var gotContent = data.toString();
    doWhatEver(gotContent) // this method is async or returns a promise
        .then(function(result) {
            console.log(result); // desired output
        });
});

Related

NodeJS net package error when client writing to server

So I took the following example code https://gist.github.com/tedmiston/5935757 and modified it slightly so that the client writes data to the server. This should be doable since the client socket does support write. In one of my use cases the client sends a fair amount of data to the server, in which case I get an ECONNRESET error. Attached are client and server snippets. I was wondering if anyone has seen this and knows what is going wrong under the covers.
Here is a copy of my client:
var net = require('net');
var client = new net.Socket({writeable: true}); // writeable true does not appear to help
client.on('close', function() {
    console.log('Connection closed');
});
client.on('error', function(err) {
    console.error('Connection error: ' + err);
    console.error(new Error().stack);
});
client.connect(5900, '127.0.0.1', function() {
    var count = 0;
    console.log('Connected');
    for (var i = 0; i < 100000; i++) {
        client.write('' + i + '');
        // bufferSize does not seem to be an issue
        //console.info(client.bufferSize);
    }
});
and my server:
var net = require('net');
var count = 0;
var server = net.createServer(function(socket) {
    socket.pipe(socket); // With this uncommented I get an ECONNRESET exception after 14299 writes; with it commented it hangs after 41020 writes
    socket.on('data', function(data) {
        console.info(count++); // This makes it occur sooner
        //count++;
        // maxConnections is not the issue
        //server.getConnections(function(err, count) {
        //    console.info('count = ' + count);
        //});
    });
    socket.on('close', function() {
        console.info('Socket close');
    });
    socket.on('error', function(err) {
        console.error('Socket error: ' + err + ', count = ' + count);
        console.error(new Error().stack);
    });
});
server.listen(5900, '127.0.0.1');
When the client has finished sending data in the for loop inside its connect event handler, there is nothing more for it to do, so it exits. That terminates the connection, and the server gets an ECONNRESET to inform it that the connection has been broken.
If you want the client to hang around after it has finished sending, give it some reason to stay alive; registering a data event handler on the client socket is one possibility (see the sketch below).
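For illustration, here is a minimal sketch of the client with a data handler registered and a client.end() once the loop has finished, so the connection is shut down cleanly instead of the process simply exiting; the write loop and port come from the question, the rest is just one way to do it.
var net = require('net');
var client = new net.Socket();

client.on('close', function() {
    console.log('Connection closed');
});
client.on('error', function(err) {
    console.error('Connection error: ' + err);
});
// A registered 'data' handler gives the client a reason to stay alive
// until the server is done talking to it.
client.on('data', function(data) {
    console.log('Server said: ' + data);
});

client.connect(5900, '127.0.0.1', function() {
    console.log('Connected');
    for (var i = 0; i < 100000; i++) {
        client.write('' + i + '');
    }
    // Signal that we are finished writing rather than dropping the connection.
    client.end();
});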

Test dgram binding in Node.js

How can I write a test to verify that my application is binding to a particular UDP port via dgram? Below is a snippet of the code I am wanting to test:
var dgram = require('dgram');
var socket_json = dgram.createSocket('udp4');
var socket_syslog = dgram.createSocket('udp4');
socket_json.bind(6371);
socket_syslog.bind(6370);
I am attempting to use Mocha and Chai for my tests:
var expect = require("chai").expect;
var sawyer = require("../server.js");

describe("Sawyer", function() {
    it("should bind to UDP port 6370", function() {
        // some kind of expect statement here
    });
    it("should bind to UDP port 6371", function() {
        // some kind of expect statement here
    });
});
Any help is greatly appreciated!
According to the documentation, an error event will be emitted on the socket, or an error may be thrown, if socket.bind() fails.
So you could watch for those two things, and also pass a callback argument to socket.bind() (or add a listening event handler on the socket, which is what socket.bind() does with that callback for you); it is called once the binding is complete.
Example:
socket_json.on('error', function(err) {
    console.log('Socket error: ' + err.message);
}).on('listening', function() {
    console.log('Successfully bound JSON socket!');
});
try {
    socket_json.bind(6371);
} catch (ex) {
    console.log('Failed to bind JSON socket: ' + ex.message);
}
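To turn that into a Mocha/Chai test without modifying server.js, one option is a sketch like the following, under the assumption that requiring ../server.js binds the ports as a side effect of loading (as in the snippet above): try to bind a second socket to the same port and expect EADDRINUSE, which implies your application is already holding it.
var dgram = require('dgram');
var expect = require('chai').expect;
require('../server.js'); // binds 6370 and 6371 as a side effect of loading

describe('Sawyer', function() {
    function expectPortTaken(port, done) {
        var probe = dgram.createSocket('udp4');
        probe.on('error', function(err) {
            // The port is already held by the application under test.
            expect(err.code).to.equal('EADDRINUSE');
            done();
        });
        probe.on('listening', function() {
            // Binding succeeded, so nothing was listening on that port.
            probe.close();
            done(new Error('port ' + port + ' was not bound'));
        });
        probe.bind(port);
    }

    it('should bind to UDP port 6370', function(done) {
        expectPortTaken(6370, done);
    });

    it('should bind to UDP port 6371', function(done) {
        expectPortTaken(6371, done);
    });
});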

NodeJS: TCP socket server only returns data the first time

I'm attempting to write a small relay script in node.js that listens for incoming TCP connections on a local socket and, when it gets one, forwards the traffic to a 3rd party. It must also take any returned data from that 3rd party and send it back to the original local socket. I've tried code like http://delog.wordpress.com/2011/07/19/a-tcp-relay-mechanism-with-node-js/ and it does work, but it requires the sender to be a server that is listening on a socket itself, and my utility is intended to work with any program that tries to create an outbound TCP connection. Unfortunately, the problem I'm running into is that everything works great the first time: the client sends the data to the "router" program, the router forwards it to another server, and the response is returned to the client. However, when the client program ends or is terminated and attempts to reconnect, I get this:
events.js:72
throw er; // Unhandled 'error' event
^
Error: This socket has been ended by the other party
at Socket.writeAfterFIN [as write] (net.js:275:12)
at Socket.<anonymous> (/root/tcp_loop.js:37:17)
at Socket.emit (events.js:117:20)
at Socket.<anonymous> (_stream_readable.js:748:14)
at Socket.emit (events.js:92:17)
at emitReadable_ (_stream_readable.js:410:10)
at emitReadable (_stream_readable.js:406:5)
at readableAddChunk (_stream_readable.js:168:9)
at Socket.Readable.push (_stream_readable.js:130:10)
at TCP.onread (net.js:528:21)
I ripped out all of the logic and distilled the test case into a small bit of code: one server that acts as both the router (listening on port 8124) and the "remote" server (on port 9999), though my testing indicates it makes no difference whether the remote server is on the same machine, on the Internet, etc. Here is the server code:
var net = require('net'),
    util = require('util');

// The loop_server simulates a remote service.
// The error occurs whether using it here, or actually forwarding
// the data to a remote host.
var loop_server = net.createServer(function(loop) {
    console.log("Loop server connected");
    loop.on("end", function() {
        console.log("Loop server disconnected");
    });
    loop.on("data", function(data) {
        console.log("Loop got data: " + data);
        loop.write(data);
    });
}).listen(9999, function() {
    console.log("Loop server bound");
});

var remote_socket = net.connect(9999, function() {
    console.log("Remote connected");
    var local_server = net.createServer(function(local_socket) { // 'connection' listener
        console.log('Local server connected');
        local_socket.on('end', function() {
            console.log('Local server disconnected');
            // local_socket.destroy();
        });
        local_socket.on('data', function(ldata) {
            console.log("Local socket got data: " + ldata);
            remote_socket.write(ldata);
        });
        remote_socket.on('data', function(rdata) {
            console.log("Remote socket got data: " + rdata);
            local_socket.write(rdata);
        });
        local_socket.write('hello\r\n');
    }).listen(8124, function() { // 'listening' listener
        console.log('Local server bound');
    });
}); // remote_socket
The thing that's failing is the local_socket.write(rdata); in the remote_socket.on('data', ...) handler. It works the first time the router is started and the client connects, but never again.
For reference, here is the code for the little client app that I've been using. I get the same result with a perl script, telnet, etc.:
var net = require('net');
var client = new net.Socket();
client.connect(8124, function() {
    console.log('CONNECTED TO: localhost:8124');
    client.write('Single text message from the client app');
});
client.on('data', function(data) {
    console.log('DATA: ' + data);
});
client.on('close', function() {
    console.log('Connection closed');
});
Any insight would be greatly appreciated. I feel like I must be missing something extremely simple here...
Update:
Nitzin's solution below is a better way to do this, but in my particular example the fix is to remove old remote_socket.on('data') listeners before creating new ones, e.g.:
var remote_socket = net.connect(9999, function() {
    console.log("Remote connected");
    var local_server = net.createServer(function(local_socket) { // 'connection' listener
        console.log('Local server connected');
        remote_socket.removeAllListeners('data');
        ...
        remote_socket.on('data', function(rdata) {
            console.log("Remote socket got data: " + rdata);
            local_socket.write(rdata);
        });
You should not destroy the socket. It closes both ends of the socket. You should only .end() it, which closes your writing end.
EDIT
Destroying the socket is bad, as I originally wrote, but your real problem is something completely different: you have your proxy (what you call "local") and echo (what you call "remote") servers backwards. The proxy server should make a new connection to the echo server for each new connection the proxy server receives, not the other way around as you have it now.
The only end() needed is in the client, to let the server know you're done writing.
Here is client.js:
var net = require('net');
var client = new net.Socket();
client.connect(8124, function() {
    console.log('CLIENT: CONNECTED: localhost:8124');
    client.write('single text message from the client app');
    client.end();
});
client.on('data', function(data) {
    console.log('CLIENT: GOT DATA: ' + data);
});
client.on('close', function() {
    console.log('CLIENT: CONNECTION CLOSED');
});
And here is servers.js:
var net = require('net'),
    util = require('util');

net.createServer(function(conn) {
    console.log('ECHO_SERVER: CONN: new connection');
    conn.on('end', function() {
        console.log('ECHO_SERVER: CONN: disconnected');
    });
    conn.on('data', function(data) {
        console.log('ECHO_SERVER: CONN: GOT DATA: ' + data);
        conn.write(data);
    });
}).listen(9999, function() {
    console.log('ECHO_SERVER STARTED');
});

net.createServer(function(conn) {
    console.log('PROXY_SERVER: CONN: new connection');
    var remote = net.connect(9999, function() {
        console.log('PROXY_SERVER: CONNECTED TO ECHO_SERVER');
        conn.on('end', function() {
            console.log('PROXY_SERVER: CONN: disconnected');
            remote.end();
        });
        conn.on('data', function(data) {
            console.log('PROXY_SERVER: CONN: GOT DATA FOR ECHO_SERVER: ' + data);
            remote.write(data);
        });
        remote.on('data', function(data) {
            console.log('PROXY_SERVER: CONN: GOT DATA FROM ECHO_SERVER: ' + data);
            conn.write(data);
        });
    });
}).listen(8124, function() {
    console.log('PROXY_SERVER STARTED');
});
As you can see, for each conn to the proxy server, there is a new remote going to the echo server.

Node.js: client doesn't die when TCP connection closes

I built a simple TCP server and a simple TCP client in Node.js.
Now, when the client sends "exit" to the server, the connection is successfully closed. The server deletes the socket from its sockets list and sends "Bye bye!" to the client.
The connection on the client is closed as well but the app is still waiting for other inputs, so it doesn't die and I'm forced to type CTRL+C.
I tried adding process.exit() after connection closes but it doesn't work:
CLIENT CODE:
var net = require('net'),
    config = require(__dirname + '/config.json'),
    connection = net.createConnection(config.port, config.host);

connection.setEncoding('utf8');
connection.on('connect', function () {
    console.log('Connected');
});
connection.on('error', function (err) {
    console.error(err);
});
connection.on('data', function (data) {
    console.log('» ' + data);
});
connection.on('close', function() {
    console.log('Connection closed');
});
process.stdin.on('data', function (data) {
    if ((new String(data)).toLowerCase() === 'exit') {
        connection.end();
        process.exit();
    }
    else {
        connection.write(data);
    }
});
process.stdin.resume();
SERVER CODE:
var server = require('net').createServer(),
    config = require(__dirname + '/config.json'),
    sockets = [];

server.on('connection', function (socket) {
    socket.setEncoding('UTF-8');
    socket.on('data', function (data) {
        console.log('Received data: ' + data);
        if (data.trim().toLowerCase() === 'exit') {
            socket.write("Bye bye!\n");
            socket.end();
        }
        else {
            sockets.forEach(function (client) {
                if (client && client != socket) {
                    client.write(data);
                }
            });
        }
    });
    socket.on('close', function () {
        console.log('Connection closed');
        sockets.splice(sockets.indexOf(socket), 1);
        console.info('Sockets connected: ' + sockets.length);
    });
    sockets.push(socket);
});
server.on('listening', function () {
    console.log('Server listening');
});
server.on('close', function () {
    console.log('Server is now closed');
});
server.on('error', function (err) {
    console.log('error:', err);
});
server.listen(config.port);
EDIT:
I added a client connection "on close" event handler. So, the string "Connection closed" is now printed by the server and by the client too.
I think you're looking for this: socket.unref().
From Node.js documentation (https://nodejs.org/api/net.html#net_socket_unref):
socket.unref()
Calling unref on a socket will allow the program to exit if this is the only active socket in the event system. If the socket is already unrefd calling unref again will have no effect.
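In the client above, that would be a small change; here is a minimal sketch, assuming the same connection object, and noting that process.stdin.resume() also keeps the process alive, so stdin may need to be paused as well:
connection.on('connect', function () {
    console.log('Connected');
    // Let the process exit when this socket is the only thing left in the event loop.
    connection.unref();
});

connection.on('close', function () {
    console.log('Connection closed');
    // stdin was resumed at startup and would otherwise keep the process running.
    process.stdin.pause();
});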
Some time ago, when improving the test suite for the node-cubrid module, I encountered the same problem. After all tests had passed, the nodeunit process didn't quit, because node-cubrid was using connection.end() to close the client socket when a timeout occurs, just like you did.
Then I replaced connection.end() with connection.destroy(), a cleaner way to ensure the socket is really closed without actually terminating the running process, which, I think, is a better solution than the process.exit() suggested above. So, in your client code context, I would do:
process.stdin.on('data', function (data) {
    if ((new String(data)).toLowerCase() === 'exit') {
        connection.destroy();
    }
    else {
        connection.write(data);
    }
});
According to Node.js documentation:
socket.destroy()
Ensures that no more I/O activity happens on this socket. Only necessary in case of errors (parse error or so).
I doubt that if ((new String(data)).toLowerCase() === 'exit') is succeeding because data most likely has a trailing newline (in your server, you trim() before doing the comparison, but not in the client).
If that's fixed, you've got a logic problem: when getting "exit" you close the connection without sending "exit" to the server, so the server code that looks for "exit" will never execute.
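A minimal sketch of one way to address both points in the client's stdin handler, keeping the structure from the question: trim the input before comparing, send the command to the server before ending the write side, and let the existing 'close' handler decide when to exit.
process.stdin.on('data', function (data) {
    var line = data.toString().trim();
    if (line.toLowerCase() === 'exit') {
        connection.write(line + '\n'); // the server sees "exit" and can reply "Bye bye!"
        connection.end();              // close our writing end; wait for 'close' before exiting
    }
    else {
        connection.write(data);
    }
});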
You have to put the process.exit() call only in the last event handler to fire. So, in this case, put it inside the client connection's 'close' event handler:
CLIENT:
connection.on('close', function() {
    console.log('Connection closed');
    process.exit();
});
Try with Event: 'close' in the server:
http://nodejs.org/api/net.html#net_event_close

Socket.io not sending a message to all connected sockets

I'm trying out node.js and socket.io. I want to use it to replace a polling ("ping") function I currently use to get updates from my server. Here is an example of what I'm doing:
var app = require('http').createServer(),
    io = require('socket.io').listen(app),
    cp = require('child_process');

app.listen(8080);

// I check a global value for all the connected users from the php command line
var t = setInterval(function(){
    cp.exec('/usr/bin/php /Users/crear/Projects/MandaFree/symfony api:getRemainingMessages',
        function(err, stdout){
            if (err) {
                io.sockets.emit('error', 'An error ocurred while running child process.');
            } else {
                io.sockets.emit('change', stdout);
            }
            console.log('Remaining messages: ' + stdout);
        });
}, 3000);

var remaining = io.of('/getRemainingMessages')
    .on('connection', function(socket){
        socket.on('disconnect', function(){});
    });
The issue here is that when I call io.sockets.emit() the debug console tells me it is doing something, but it looks like it is not getting to the clients, because they are doing nothing.
I used to have one interval for every connected client, and when I used socket.emit() it did work, but that is not the optimal solution.
UPDATE:
Here is my client side code.
var remaining = io.connect('http://127.0.0.1:8080/getRemainingMessages');
remaining.on('change', function(data){
    console.log('Remaining messages: ' + data);
    $('#count').html(data);
});
remaining.on('error', function(error){
    console.log(error);
});
I had a very similar issue a couple of days back, and it looks like socket.io has had some changes in its API. I have never worked with Symfony and am hoping the issues are the same.
I have a working demo of socket.io sending and receiving a message - uploaded to https://github.com/parj/node-websocket-demo as a reference
Essentially two changes
On the server side, change socket.on to socket.sockets.on:
var socket = io.listen(server);
socket.sockets.on('connection', function(client)
On the client side, the URL and port are not required as they are autodetected:
var socket = io.connect();
This has been tested using Express 2.5.2 and Socket.io 0.8.7
I have amalgamated your server code with mine. Would you be able to try this on the server, together with my client JavaScript and client HTML, just to see if it is working?
var socket = io.listen(server);
socket.sockets.on('connection', function(client){
    var connected = true;
    client.on('message', function(m){
        console.log('Message received: ' + m);
    });
    client.on('disconnect', function(){
        connected = false;
    });
    var checkMessages = function(){
        if (!connected) {
            return;
        }
        cp.exec('/usr/bin/php /Users/crear/Projects/MandaFree/symfony api:getRemainingMessages',
            function(err, stdout){
                if (err) {
                    client.send('error : An error ocurred while running child process.');
                } else {
                    client.send('change : ' + stdout);
                }
                console.log('Remaining messages: ' + stdout);
            });
    };
    var t = setInterval(checkMessages, 3000);
    checkMessages(); // run once immediately instead of waiting for the first interval
});
