NodeJS: Error: read ECONNRESET at TCP.onStreamRead (internal/stream_base_commons.js:111:27)

I am using polling (as below) to check whether the content of a file has changed; if it has, two other functions are called:
var poll_max_date = AsyncPolling(function (end, err) {
    if (err) {
        console.error(err);
    }
    var stmp_node_id = fs.readFileSync(path.join(__dirname, 'node_id'), "utf8");
    console.log("--------loaded node : " + stmp_node_id);
    if (druid_stmp_node_id != stmp_node_id) {
        // MAX DATA CUT-OFF DRUID QUERY
        druid_exe.max_date_query_fire();
        // DRUID QUERY FOR GLOBAL DATA
        druid_exe.global_druid_query_fire();
        druid_stmp_node_id = stmp_node_id;
    }
    end();
}, 1800000).run(); // 30 mins
It works fine for some time, but after about 4-5 hours I get the error below:
events.js:167
throw er; // Unhandled 'error' event
^
Error: read ECONNRESET
    at TCP.onStreamRead (internal/stream_base_commons.js:111:27)
Emitted 'error' event at:
at emitErrorNT (internal/streams/destroy.js:82:8)
at emitErrorAndCloseNT (internal/streams/destroy.js:50:3)
I then tried using fs.watch to monitor changes in the file instead of polling, like below:
let md5Previous = null;
let fsWait = false;
fs.watch(dataSourceLogFile, (event, filename) => {
    if (filename) {
        if (fsWait) return;
        fsWait = setTimeout(() => {
            fsWait = false;
        }, 1000);
        const md5Current = md5(fs.readFileSync(dataSourceLogFile));
        if (md5Current === md5Previous) {
            return;
        }
        md5Previous = md5Current;
        console.log(`${filename} file Changed`);
        // MAX DATA CUT-OFF DRUID QUERY
        druid_exe.max_date_query_fire();
        // DRUID QUERY FOR GLOBAL DATA
        druid_exe.global_druid_query_fire();
    }
});
It also works fine for some time, but then I get the same error after about 4-5 hours:
events.js:167
      throw er; // Unhandled 'error' event
      ^

Error: read ECONNRESET
    at TCP.onStreamRead (internal/stream_base_commons.js:111:27)
Emitted 'error' event at:
    at emitErrorNT (internal/streams/destroy.js:82:8)
    at emitErrorAndCloseNT (internal/streams/destroy.js:50:3)
When run on my local machine it works fine; the error occurs only when run on the remote Linux machine.
Can somebody help me fix this problem?

Try fs.watchFile instead, because fs.watch is not consistent across platforms:
https://nodejs.org/docs/latest/api/fs.html#fs_fs_watchfile_filename_options_listener
Change your code accordingly.
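A minimal sketch of what that change could look like, reusing the node_id file and the druid_exe query functions from the question (the 30-minute interval mirrors the original polling interval; druid_exe is assumed to be required elsewhere, as in the original code):
const fs = require('fs');
const path = require('path');

const nodeIdFile = path.join(__dirname, 'node_id');

// fs.watchFile polls the file's stats itself, so it behaves the same way
// on the remote Linux machine as it does locally.
fs.watchFile(nodeIdFile, { interval: 1800000 /* 30 mins */ }, (curr, prev) => {
    if (curr.mtime.getTime() === prev.mtime.getTime()) return; // nothing changed

    const stmp_node_id = fs.readFileSync(nodeIdFile, 'utf8');
    console.log('--------loaded node : ' + stmp_node_id);

    // MAX DATA CUT-OFF DRUID QUERY
    druid_exe.max_date_query_fire();
    // DRUID QUERY FOR GLOBAL DATA
    druid_exe.global_druid_query_fire();
});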

This was happening because users were closing the browser before the data request completed, leading to a connection reset.
I used PM2 (http://pm2.keymetrics.io/) to run the application, and it is working great now.
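For reference, a minimal PM2 setup might look like the sketch below; the file name, app name and entry script are placeholders, not taken from the original project. It is started with pm2 start ecosystem.config.js.
// ecosystem.config.js -- placeholder names; start with `pm2 start ecosystem.config.js`
module.exports = {
    apps: [
        {
            name: 'druid-poller',   // hypothetical app name
            script: './app.js',     // hypothetical entry point
            autorestart: true       // PM2 restarts the process if an unhandled error kills it
        }
    ]
};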

Related

How to write on process.stdin with nodejs

I have a problem in Node.js when trying to write to the stdin of a process I have spawned with the spawn function of child_process:
const spawn = require("child_process").spawn;

class Barotrauma {
  static instance = null;
  server = null;

  constructor() {
    this.server = spawn(
      "F:\\dev\\barotrauma\\steamcmd\\steamapps\\common\\Barotrauma Dedicated Server\\DedicatedServer.exe",
      {
        stdio: [process.stdin, process.stdout, process.stderr],
      }
    );
  }

  static getInstance() {
    if (Barotrauma.instance === null) {
      Barotrauma.instance = new Barotrauma();
    } else {
      return Barotrauma.instance;
    }
  }

  sendCommand(command) {
    //this.server.stdout.write(`${command}\n`)
    // this is a test to get an output on command execution to see if it works
    process.stdin.write("help\n");
  }
}

module.exports = Barotrauma;
This snippet of code starts a game server and then sends commands to it on socket events (the socket calls the sendCommand function).
If I type commands in the console it works fine, but if I call the sendCommand function it crashes with this error:
node:events:498
throw er; // Unhandled 'error' event
^
Error: write EPIPE
at afterWriteDispatched (node:internal/stream_base_commons:160:15)
at writeGeneric (node:internal/stream_base_commons:151:3)
at ReadStream.Socket._writeGeneric (node:net:795:11)
at ReadStream.Socket._write (node:net:807:8)
at writeOrBuffer (node:internal/streams/writable:389:12)
at _write (node:internal/streams/writable:330:10)
at ReadStream.Writable.write (node:internal/streams/writable:334:10)
at Barotrauma.sendCommand (F:\dev\barotrauma\serverManager.js:26:19)
at handleReward (F:\dev\barotrauma\handler.js:4:28)
at WebSocket.connection.onmessage (F:\dev\barotrauma\index.js:26:5)
Emitted 'error' event on ReadStream instance at:
at emitErrorNT (node:internal/streams/destroy:157:8)
at emitErrorCloseNT (node:internal/streams/destroy:122:3)
at processTicksAndRejections (node:internal/process/task_queues:83:21) {
errno: -4047,
code: 'EPIPE',
syscall: 'write'
}
The crash seems to originate from the process.stdin.write call.
Any idea how to solve this?
You might want to refer to options.stdio.
Without the full context it's hard to say exactly what's going on, but the following might work as you'd expect.
If you really need the parent and child fds to be connected, you could use pipe for the stdio option and have data propagated between the two processes.
The following snippet (reduced to minimize noise) should do the trick:
class Barotrauma {
  server = null;

  constructor() {
    // Default `stdio` option is `pipe` for fds 0, 1 and 2 (stdin, stdout and stderr)
    this.server = spawn(
      "F:\\dev\\barotrauma\\steamcmd\\steamapps\\common\\Barotrauma Dedicated Server\\DedicatedServer.exe"
    );

    // Catches incoming messages from the child, if necessary
    this.server.stdout.on('data', (d) => {
      console.log(`Message from child: '${d}'`);
    });
  }

  sendCommand(cmd) {
    // Sends messages to the child
    this.server.stdin.write(`${cmd}\n`);
  }
}
Given you're in a Windows environment, mind that you might want to use overlapped instead of pipe if you need async I/O to communicate with that application. For more info, refer to the official docs.
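As a rough sketch of that overlapped variant (same executable path as in the question; it requires a Node version recent enough to support the 'overlapped' stdio value, and only the stdio option changes relative to the snippet above):
const { spawn } = require("child_process");

class Barotrauma {
  server = null;

  constructor() {
    // 'overlapped' behaves like 'pipe' but opens the pipes in overlapped (async)
    // mode on Windows, which some console applications expect for interactive I/O.
    this.server = spawn(
      "F:\\dev\\barotrauma\\steamcmd\\steamapps\\common\\Barotrauma Dedicated Server\\DedicatedServer.exe",
      { stdio: ["overlapped", "overlapped", "overlapped"] }
    );
  }

  sendCommand(cmd) {
    this.server.stdin.write(`${cmd}\n`);
  }
}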

Why am I getting "unexpected end of file" with zlib.js?

I'm using the unzipper Node.js package and it was working fine, processing my files, when suddenly, for seemingly no reason, I got this error:
events.js:291
throw er; // Unhandled 'error' event
^
Error: unexpected end of file
at Zlib.zlibOnError [as onerror] (zlib.js:180:17)
Emitted 'error' event on Parse instance at:
at emitErrorNT (internal/streams/destroy.js:106:8)
at emitErrorCloseNT (internal/streams/destroy.js:74:3)
at processTicksAndRejections (internal/process/task_queues.js:80:21) {
errno: -5,
code: 'Z_BUF_ERROR'
}
The error gets thrown every time this particular file is processed, others seem to work fine, but I can't think of what's different about this one.
Is this potentially a mistake on my side, or a bug I should report?
Code:
console.log("unzipping", zipPath) // <-- error triggers after this log
fs.createReadStream(zipPath)
  .pipe(unzipper.Parse())
  .on('entry', function (entry) {
    entry.pipe(fs.createWriteStream(csvPath)).on('finish', () => {
      console.log("done unzipping", zipPath)
      fd = fs.openSync(csvPath, "r+")
      initializeFile()
    })
  })

How to fix 'events.js:167 Error: connect ECONNREFUSED 127.0.0.1:443' in Node.js when no other app seems to be attempting to use the port?

I'm getting the error described below when running my Node.js app after performing a few API calls.
The error does not always show up in exactly the same place/line of code, but most of the time it is at the end of the API call.
events.js:167
throw er; // Unhandled 'error' event
^
Error: connect ECONNREFUSED 127.0.0.1:443
at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1113:14)
Emitted 'error' event at:
at TLSSocket.socketErrorListener (_http_client.js:391:9)
at TLSSocket.emit (events.js:182:13)
at emitErrorNT (internal/streams/destroy.js:82:8)
at emitErrorAndCloseNT (internal/streams/destroy.js:50:3)
at process._tickCallback (internal/process/next_tick.js:63:19)
Based on similar questions here at SO, my hypothesis is that a) there is something using 127.0.0.1:443 and therefore conflicting with my app, or b) node is trying to use 127.0.0.1:443 but there is nothing there for it to use (my app is listening on localhost:3000).
Hypothesis a) doesn't seem likely, since after running netstat -ano | findstr 127.0.0.1:443 nothing shows up (while the app is running and right after it terminates).
I also killed every node.exe and mongod.exe using any port on my computer, closed the terminal and restarted the node app, without success.
In case the error is related to hypothesis b), I'm not sure how to address it.
api.post('/parsePOpdf', wagner.invoke(function (Pdfeq, Pdfdocspec, Product, User, Order) {
  return async function (req, res) {
    //... some code
    pdfParser.on("pdfParser_dataError", errData => console.error(errData.parserError));
    pdfParser.on("pdfParser_dataReady", async function (pdfData) {
      fs.writeFile("./test.json", JSON.stringify(pdfData), function (err) {
        console.log(err);
      });
      let pages = pdfData.formImage.Pages;
      //console.log('pages 557', pages);
      let order = {
        orderDetails: {
          supplier: [{
            item: []
          }]
        }
      };
      for (const page of pages) {
        let value = await getItemsInPDF(page, productKeys, pdfParsingDetails, order, Product, customer, supplierLink, User);
        //... more code
        order = value;
      }
      return res.json(order);
    });
    pdfParser.loadPDF(pdfFile);
  };
}));
I would expect the code to finish without throwing this error.
It turns out that the problem was in the API code: an http.get call fetching a remote file was generating the conflict. This makes sense, since the error was not present for the other endpoints of the API.
So the lesson is: if the terminal reports no app using the suspected conflicting port (see question), the answer is probably within your own code, and you need to go line by line to identify which call is causing the problem (instead of focusing on other apps trying to use the same port, as I was).
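For anyone debugging the same thing, attaching an 'error' listener to each outgoing request turns the crash into a traceable log line. A rough sketch (the URL is a placeholder, not from the original API; shown with the https module since the failing connection in the question was to port 443):
const https = require("https");
const fs = require("fs");

// Placeholder URL -- the point is the 'error' handler on the request object.
https
  .get("https://example.com/remote-file.pdf", (res) => {
    res.pipe(fs.createWriteStream("./remote-file.pdf"));
  })
  .on("error", (err) => {
    // Without this listener, a refused connection surfaces as the unhandled
    // "Error: connect ECONNREFUSED" event that crashes the process.
    console.error("Remote file fetch failed:", err.message);
  });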

RESTIFY: [Error: socket hang up] code: 'ECONNRESET' on multiple requests

I am implementing a node app, which brings in order details from BigCommerce.
Multiple calls are made to BigCommerce API asynchronously using Restify JsonClient.
It works fine for some calls, but after that it gives the error: [Error: socket hang up] code: 'ECONNRESET', sslError: undefined, body: {}
I have tried turning off socket pooling, i.e. by setting agent: false, but it still gives the same error.
Following is code which makes call to BigCommerce API
makeRequest = function (url, params, headers, orderDetails, cb) {
    var options = {
        headers: headers
    };
    var client = restify.createJsonClient({
        url: url
    });
    client.get(options, function (err, req, res, obj) {
        if (err) {
            console.log(err);
            cb(err, obj);
        } else if (obj != null) {
            var result = obj;
            if (orderDetails == null) {
                cb(null, result);
            } else {
                cb(null, result, orderDetails);
            }
        }
    });
};
I get the following error:
{ [Error: socket hang up] code: 'ECONNRESET', sslError: undefined, body: {} }
Error: socket hang up
at SecurePair.error (tls.js:993:23)
at EncryptedStream.CryptoStream._done (tls.js:689:22)
at CleartextStream.read [as _read] (tls.js:490:24)
at CleartextStream.Readable.read (_stream_readable.js:320:10)
at EncryptedStream.onCryptoStreamFinish (tls.js:301:47)
at EncryptedStream.g (events.js:175:14)
at EncryptedStream.EventEmitter.emit (events.js:117:20)
at finishMaybe (_stream_writable.js:352:12)
at endWritable (_stream_writable.js:359:3)
at EncryptedStream.Writable.end (_stream_writable.js:337:5)
at EncryptedStream.CryptoStream.end (tls.js:628:31)
at Socket.onend (_stream_readable.js:483:10)
Why am I getting this error? How can I handle it?
Thanks
I just wanted to make sure that you're setting the agent option in the right place.
Include "agent": false in your options. (It's not set in the options in the code you pasted.)
Per gfpacheco in the comments here: https://github.com/restify/node-restify/issues/485
By default, NodeJS uses agents to keep the TCP connection open so you can reuse it.
The problem is that if the server is closed, or it closes your connection for whatever reason, you get the ECONNRESET error.
To close the connection every time, you just need to set agent: false in your client creation.
I've tried this solution and it worked for me.
Other than that, the secureOptions: constants.SSL_OP_NO_TLSv1_2 solution posted here sounds like it could be the right path, since you're getting an sslError.
Maybe you are running into this issue: https://github.com/joyent/node/issues/5360
TL;DR: you could try the latest Node version with secureOptions: constants.SSL_OP_NO_TLSv1_2 added to your options.
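Putting the two suggestions together against the client creation in the question, the options would look roughly like this (a sketch only; url is the parameter already passed into makeRequest, and whether secureOptions is needed depends on the linked TLS issue):
var constants = require("constants");
var restify = require("restify");

var client = restify.createJsonClient({
    url: url,                                     // same `url` argument as in makeRequest
    agent: false,                                 // close the TCP connection after each request
    secureOptions: constants.SSL_OP_NO_TLSv1_2    // only if the linked TLS issue applies
});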

ForEachLine() in node.js

Referring to slide no. 35 in the PPT on SlideShare: when I run this code
var server = my_http.createServer();

server.on("request", function (request, response) {
    var chunks = [];
    output = fs.createWriteStream("./output");

    request.on("data", function (chunk) {
        chunks = forEachLine(chunks.concat(chunk), function (line) {
            output.write(parseInt(line, 10) * 2);
            output.write("\n");
        });
    });

    request.on("end", function () {
        response.writeHeader(200, { "Content-Type": "plain/text" });
        response.end("OK\n");
        output.end();
        server.close();
    });
});

server.listen("8080");
I get this error:
chunks = forEachLine(chunks.concat(chunk),function(line){
^
ReferenceError: forEachLine is not defined
Of course I understand that I need to include some library, but when I googled this I found nothing. Since I am a complete newbie to this, I have absolutely no idea how to resolve it.
Any suggestions will be appreciated.
EDIT
Using the suggested answer, I am getting this error:
events.js:72
throw er; // Unhandled 'error' event
^
TypeError: Invalid non-string/buffer chunk
at validChunk (_stream_writable.js:150:14)
at WriteStream.Writable.write (_stream_writable.js:179:12)
at /var/www/html/experimentation/nodejs/first.js:18:20
at Array.forEach (native)
at forEachLine (/var/www/html/experimentation/nodejs/first.js:8:60)
at IncomingMessage.<anonymous> (/var/www/html/experimentation/nodejs/first.js:17:18)
at IncomingMessage.EventEmitter.emit (events.js:95:17)
at IncomingMessage.<anonymous> (_stream_readable.js:736:14)
at IncomingMessage.EventEmitter.emit (events.js:92:17)
at emitReadable_ (_stream_readable.js:408:10)
Thanks
See proxy_stream.js
function forEachLine(chunks, callback) {
    var buffer = chunks.join("");
    buffer.substr(0, buffer.lastIndexOf("\n")).split("\n").forEach(callback);
    return buffer.substr(buffer.lastIndexOf("\n") + 1).split("\n");
}
The link to the repo was on the first slide.
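To make the helper's contract explicit, here is a tiny illustration with made-up input: complete lines are passed to the callback, and the trailing partial line is returned so it can be prepended to the next chunk.
var leftover = forEachLine(["12\n34\n5"], function (line) {
    console.log("line:", line); // logs "12", then "34"
});
console.log(leftover); // ["5"] -- carried over until the next "data" event completes it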
EDIT BY LET'S CODE FOR THE ERROR MESSAGE
I now know the actual issue.
I was using Node v0.10, which is buggy in handling the streams, so I was getting the error. I downgraded to v0.8 and the same code works perfectly.
