I'm trying to write a very simple node TCP server which reads in the full input stream and writes out some function of the input. The output cannot be generated without the full input so the writes cannot be streamed as the input comes in. For simplicity sake in this post, I have omitted the actual transformation of the input and am just writing back the input.
My initial attempt was to write within the end event handler:
const net = require('net');
// NOTE(review): intentionally-broken first attempt — kept to show the failure mode.
// By the time 'end' fires, the peer's FIN has arrived and (without allowHalfOpen)
// node has already ended the writable side too, so client.write() throws the
// Socket.writeAfterFIN error described below.
const server = net.createServer(async client => {
let data = '';
client.on('end', () => {
// Fails here: the socket's writable side was closed when the remote FIN arrived.
client.write(data);
});
client.on('data', part => {
data += part.toString();
});
// Also echoes every chunk straight back while buffering above — two competing
// responses; presumably left over from an earlier experiment. TODO confirm intent.
client.pipe(client);
});
server.listen(8124);
But this results in a Socket.writeAfterFIN error "This socket has been ended by the other party", which led me to enable allowHalfOpen, because the docs seem to indicate that it separates the handling of the incoming and outgoing FIN packets.
const net = require('net');
// Collect the full request body from `client`, resolving once the peer
// half-closes (the 'end' event). Rejects — instead of leaving the awaiting
// handler pending forever — if the socket errors mid-stream.
const drain = client =>
  new Promise((resolve, reject) => {
    let data = '';
    client.on('data', part => {
      // Chunks may arrive as Buffers; normalize to string before concatenating.
      data += part.toString();
    });
    client.on('end', () => {
      resolve(data);
    });
    // Without this, a reset connection would hang the caller indefinitely.
    client.on('error', reject);
  });
// Read the entire request, then answer with it and close our side of the
// connection. allowHalfOpen keeps the writable side usable after the
// client's FIN has ended the readable side.
const handleConnection = async client => {
  const request = await drain(client);
  client.end(request);
};
const server = net.createServer({ allowHalfOpen: true }, handleConnection);
server.listen(8124);
This works when I use e.g. echo 'abc' | nc localhost 8124, but I'm not sure whether allowHalfOpen should be necessary here. Is there another way to write shortly after the end of the input stream?
Using netcat instead of curl resolves the issue. e.g. echo 'abc' | nc localhost 8124. This is also more in line with what I need to do anyway since I don't need HTTP for this server.
Related
I have a working Microservice(MS) based on https://docs.nestjs.com/microservices/basics using a TCP protocol. Executing a command from my NestJS API was easy by implementing the #nestjs/microservices Client.
Now I'm working on a Lambda (AWS) in plain Node.js; this lambda contains a function that also needs to send a command to the MS. I tried using net to create a Socket to send a (stringified) command, but it doesn't trigger anything.
my example nodeJs code:
const net = require('net');
// Send an `update-many` command for `products` to the NestJS TCP microservice.
// NestJS's TCP transport expects length-prefixed JSON frames of the shape
//   [BYTE_LENGTH]#{"pattern":...,"data":...,"id":...}
// and the `id` field is what makes it a @MessagePattern request rather than
// an @EventPattern event (see the answer below).
const saveProducts = (products) => {
  const { randomUUID } = require('crypto');
  const socket = new net.Socket();
  socket.setEncoding('utf8');
  socket.on('data', (data) => {
    console.log('ON DATA'); // print out data
    console.log(data.toString()); // print out data
  });
  // Without an error listener, a refused/reset connection crashes the process.
  socket.on('error', (err) => {
    console.error(err);
  });
  socket.connect(options.port, options.host, () => {
    // called when connection is created
    const payload = JSON.stringify({
      pattern: { cmd: 'update-many', ctrl: 'product' },
      data: products, // BUG in original: the products were never sent
      id: randomUUID(), // correlation id for the microservice's response
    });
    // Frame as <byteLength>#<json>; byte length (not string length) is what
    // matters once the JSON contains multi-byte UTF-8 characters.
    socket.write(`${Buffer.byteLength(payload)}#${payload}`, 'utf8', (err) => {
      console.log(err);
    });
  });
};
I have used a network sniffer to get an example message structure..
similar issue but the suggestion is only to add #nestjs/microservices, I was wondering how to do it without it.
After some long research found out what the pattern is what you need to send:
[MSG_LEN]#{ pattern: "[PATTERN_STRING]", data: "[DATA]", id: "[ID]" }
Example:
62#{"pattern":"find","id":"ce51ebd3-32b1-4ae6-b7ef-e018126c4cc4"}
The parameter id is for #MessagePattern, without it #EventPattern will be triggered.
I have made a simple server and client program where the server reads the data from a file and sends it to the client through a TCP socket, but the data I am getting is an object and not a simple string.
So why can't I see the data as plaintext, as it is in my data.txt file?
Explanation with example would be appreciated.
Here is my code :-
SERVER CODE
const fs = require('fs');
const net = require('net');
// A single read stream over the source file, shared by every connection —
// and (the bug) serialized below instead of being read.
const readableData = fs.createReadStream('data.txt', 'utf8');
const server = net.createServer(socket => {
socket.on('data', chunk => {
console.log(chunk.toString());
// BUG (the subject of this question): readableData is a stream OBJECT, so
// JSON.stringify serializes the stream's internal state — not the file's
// contents. That is exactly the `_readableState` blob the client prints.
// See the answer below: read the file (or pipe the stream) instead.
socket.write(JSON.stringify(readableData));
});
socket.on('end', () => {
console.log("done");
})
socket.on('close', () => {
console.log("closed")
})
});
server.listen(3000);
CLIENT CODE
const fs = require('fs');
const net = require('net');
const client = new net.Socket();
// The port is passed as a string here; net coerces it to a number.
client.connect('3000', () => {
console.log("connected");
client.write("Server please send the data");
});
client.on('data', chunk => {
console.log("Data recieved:" + chunk.toString());
});
// NOTE(review): 'finish' is the writable-side event that fires after
// client.end() flushes; this code never calls end(), so "Work completed" is
// presumably never logged — verify whether the readable-side 'end' event
// was intended instead.
client.on('finish', () => {
console.log("Work completed");
})
client.on('close', () => {
console.log("connection closed");
})
And here is my data.txt file which has simple data
Hello client how are you ?
And the output I'm getting is here :-
Data recieved:{"_readableState":{"objectMode":false,"highWaterMark":65536,"buffer":{"head":{"data":"Hello client how are you ?","next":null},"tail":{"data":"Hello client how are you ?","next":null},"length":1},"length":26,"pipes":null,"pipesCount":0,"flowing":null,"ended":true,"endEmitted":false,"reading":false,"sync":false,"needReadable":false,"emittedReadable":false,"readableListening":false,"resumeScheduled":false,"paused":true,"emitClose":false,"autoDestroy":false,"destroyed":false,"defaultEncoding":"utf8","awaitDrain":0,"readingMore":false,"decoder":{"encoding":"utf8"},"encoding":"utf8"},"readable":true,"_events":{},"_eventsCount":1,"path":"data.txt","fd":35,"flags":"r","mode":438,"end":null,"autoClose":true,"bytesRead":26,"closed":false}
The question is why I can't see the data as plaintext on the client side, as it is in the data.txt file.
Your variable readableData contains a node.js stream object. That's what that variable is. It's only of use in the current node.js instance so it doesn't do anything useful to try to send that stream object to the client.
If you want to get all the data from that 'data.txt' file, you have several choices.
You can just read the whole file into a local variable with fs.readFile() and then send all that data with socket.write().
You can create a new stream attached to the file for each new incoming request and then as the data comes in on the readStream, you can send it out on the socket (this is often referred to as piping one stream into another). If you use higher level server constructs such as an http server, they make piping real easy.
Option #1 would look like this:
// Echo server: each chunk of request data triggers a fresh read of
// data.txt, whose contents are then written back to the client as text.
const server = net.createServer(socket => {
  socket.on('data', chunk => {
    console.log(chunk.toString());
    fs.readFile('data.txt', 'utf8', (err, contents) => {
      if (err) {
        // insert error handling here
        console.log(err);
        return;
      }
      socket.write(contents);
    });
  });
  socket.on('end', () => {
    console.log("done");
  });
  socket.on('close', () => {
    console.log("closed");
  });
});
FYI, you should also know that socket.on('data', chunk => {...}) can give you any size chunk of data. TCP streams do not make any guarantees about delivering the exact same chunks of data in the same pieces that they were originally sent in. They will come in order, but if you sent three 1k chunks from the other end, they might arrive as three separate 1k chunks, they might arrive as one 3k chunk or they might arrive as a whole bunch of much smaller chunks. How they arrive will often depend upon what intermediate transports and routers they had to travel over and if there were any recoverable issues along that transmission. For example, data sent over a satellite internet connection will probably arrive in small chunks because the needs of the transport broke it up into smaller pieces.
This means that reading any data over a plain TCP connection generally needs some sort of protocol so that the reader knows when they've gotten a full, meaningful chunk that they can process. If the data is plain text, it might be as simple a protocol as every message ends with a line feed character. But, if the data is more complex, then the protocol may need to be more complex.
So I am trying to set up a socket server in node.js using node-ipc, then send data from a client. I can connect perfectly fine; however, when I send data I receive the error Messages are large, You may want to consider smaller messages. I have followed all advice here, but have still been unsuccessful in resolving the problem. I have tried sending from a socket connection in C and also from the terminal. Any advice would be great, thanks in advance.
main.js
const ipc = require("node-ipc");
// Path of the Unix domain socket the node-ipc server listens on.
const socket = '/tmp/edufuse.sock';
ipc.serve(
socket,
function(){
// Runs once the server is up; event handlers are registered here.
ipc.server.on(
'myEvent',
function(data){
// '.debug' is presumably node-ipc's colorized-logging string helper —
// TODO confirm against the node-ipc docs.
ipc.log('got a message : '.debug, data);
}
);
}
);
ipc.server.start();
test.json
```json
{ type: message, data: { foo: bar } }
```
command from terminal
pr -tF test.json | nc -U /tmp/edufuse.sock
Unfortunately, it appears this is an underlying problem with node-ipc. In order to get around this, I used net sockets and TCP.
const net = require('net');
const port = 8080;
const host = '127.0.0.1';
// Minimal TCP server: log every chunk as text and, when possible, as
// parsed JSON; malformed payloads fall back to the raw-string log line.
const server = net.createServer((socket) => {
  socket.on('data', (data) => {
    const text = data.toString('utf8');
    console.log(text);
    try {
      const parsed = JSON.parse(text);
      console.log(parsed);
    } catch (e) {
      console.log('error str: ' + text);
    }
  });
  // Keep a failed connection from crashing the process.
  socket.on('error', (err) => {
    console.log(err);
  });
});
server.listen(port, host);
I have a node.js sample where a client socket makes two writes to a server. I'm trying to make sure the server receives the writes one by one, using the socket.write with a callback:
var net = require('net');
const HOST = '127.0.0.1';
const PORT = 7000;
var server = new net.Server(socket => {
socket.on('data', data => {
console.log("Server received: " + data);
})
});
server.listen(PORT, HOST);
var client = new net.Socket();
client.connect(PORT, HOST);
// NOTE(review): two write()s on one TCP stream carry no message boundaries;
// the receiver may see both payloads in a single 'data' event (the
// "call 1call 2" output shown below) — TCP is a byte stream, not a
// sequence of messages.
client.write("call 1", "utf8", () => {
client.write("call 2");
});
When I run it I get output:
Server received: call 1call 2
According to the docs here https://nodejs.org/api/net.html#net_socket_write_data_encoding_callback:
... The optional callback parameter will be executed when the data is finally written out...
What does data is finally written out mean? How can I make the server produce:
Server received: call 1
Server received: call 2
Thanks,
Dinko
You are dealing with a stream. It does not know anything about the beginning and end of your messages.
You need to add a delimiter (e.g. \n: client.write("call 2\n");)
You need to split the data by the delimiter on the receiver (e.g. with the node split package).
You can set a timeout for the second event.
client.write("call 1", "utf8")
// NOTE(review): a fixed delay only makes separate 'data' events likely; it
// does not guarantee them. Framing with a delimiter (previous answer) is
// the reliable fix.
setTimeout(() => {
client.write("call 2");
}, 100);
What I tried to achieve with node.js/io.js is to send a file from one server to another one via a proxy. To avoid memory buffering I want to use streams.
The proxy should be able to connect to multiple targets dynamically. The target connection information for the proxy should be sent prior to the file data.
With normal socket communication and buffering it is not a problem. But how, in general, can this be done with streams?
var net = require('net');
var fs = require('fs');
// Create a read stream over the source file; it is consumed once by the
// client section below (myFile.pipe(client)).
var myFile = fs.createReadStream('E:/sample.tar.gz');
// Proxy server
//####################################################################################################
// Proxy: for every inbound connection, open a fresh link to the target
// server and splice the two sockets together in both directions.
var proxy = net.createServer(function (inbound) {
  var upstream = net.connect('9010');
  inbound.pipe(upstream);
  upstream.pipe(inbound);
}).listen(9000);
// Targetserver
//####################################################################################################
// Target server: persist the incoming byte stream to disk.
var server = net.createServer(function (socket) {
  // create filestream to write data into file
  var destfile = fs.createWriteStream('E:/sample_copy.tar.gz');
  // Kept from the original: per-chunk progress logging.
  socket.on('data', function () {
    console.log('Get data on targetserver...');
  });
  // pipe() replaces the manual write()/end() handling: the original ignored
  // write()'s return value, so a fast sender could buffer the whole file in
  // memory — exactly what this example set out to avoid. pipe() honors the
  // write stream's backpressure and ends it automatically when the socket
  // ends.
  socket.pipe(destfile);
}).listen(9010);
// Client
//####################################################################################################
// Send file to proxy
// Client: stream the archive through the proxy and report the reply.
var client = new net.Socket();
// connect to proxy
client.connect('9000', '127.0.0.1', () => {
  console.log('Connection to proxy opened');
});
// Stream the file out without buffering it in memory.
myFile.pipe(client);
// Read the response relayed back from the target.
client.on('data', (data) => {
  console.log('Response: ' + data);
  // One response is all we expect; tear the socket down completely.
  client.destroy();
});
// Fires once the socket is fully closed.
client.on('close', () => {
  console.log('Connection to proxy closed');
});
Any hint to a good tutorial is also welcome.
TMOE
socket.write() already uses streams under the hood so you don't need to do anything special. Just send it the usual Buffer object or string and it will use a stream.
From the current source code of io.js, here's what happens when you use socket.write():
// (Quoted from io.js lib/net.js — shown verbatim; annotations added for review.)
Socket.prototype.write = function(chunk, encoding, cb) {
// Only strings and Buffers are accepted at this level.
if (typeof chunk !== 'string' && !(chunk instanceof Buffer))
throw new TypeError('invalid data');
// Delegates straight to the generic Duplex stream machinery.
return stream.Duplex.prototype.write.apply(this, arguments);
};
And stream is declared like this:
const stream = require('stream');
Apologies if I've misunderstood your question/requirements! By all means, clarify if I have misunderstood you and I'll try again (or delete this answer so it's not a distraction).