axios post socket hang up only in docker - node.js

I have a server that takes upwards of 10 minutes to begin responding to a request. I have a Node.js client that uses axios to send a POST request to this server, and I have configured the axios timeout to be 30 minutes.
When running the code directly on my machine, axios correctly waits for the response, and after 10 minutes I receive a 200 OK. When the same script is run in docker (node:10 base image), I receive a socket hang up error after about 5.5 minutes.
server:
const http = require('http')

const server = http.createServer(function (request, response) {
  if (request.method == 'POST') {
    var body = ''
    request.on('data', function (data) {
      body += data
      console.log('Partial body: ' + body)
    })
    request.on('end', function () {
      setTimeout(() => {
        response.writeHead(200, { 'Content-Type': 'text/html' })
        response.end('post received')
      }, 1000 * 60 * 10);
    });
  } else {
    response.writeHead(200, { 'Content-Type': 'text/plain' });
    response.end('Ok');
  }
})
const port = 5000
const host = '10.0.0.50'
server.setTimeout(1000 * 60 * 30);
server.listen(port, host)
console.log(`Listening at http://${host}:${port}`)
client:
let axios = require('axios');

async function run () {
  const axiosInstance = axios.create({
    baseURL: 'http://livedoc.transmissionmedia.ca/',
    timeout: (1000 * 60 * 30),
  });
  axiosInstance.defaults.timeout = (1000 * 60 * 30);

  console.log(`${new Date().toISOString()} - start`);
  // const resp = await axiosInstance.post(`http://10.0.0.50:5000/weatherforecast`);
  const resp = await axiosInstance({
    method: 'post',
    url: `http://10.0.0.50:5000/weatherforecast`,
    timeout: 1000 * 60 * 30
  });
  console.log(`${new Date().toISOString()} - end`);
}

run()
  .then(() => { console.error('Succeeded!'); process.exit(0); })
  .catch(err => { console.error('Failed!'); console.error(err); process.exit(-1); });
When the client script is run in a docker container, I receive the following error after about 5.5 minutes:
{ Error: socket hang up
    at createHangUpError (_http_client.js:332:15)
    at Socket.socketOnEnd (_http_client.js:435:23)
    at Socket.emit (events.js:203:15)
    at endReadableNT (_stream_readable.js:1145:12)
    at process._tickCallback (internal/process/next_tick.js:63:19)
  code: 'ECONNRESET',
  ...
}

I don't know if this is the solution to your problem, but I was having a socket hang up issue with API calls only in docker, running on Google Compute Engine.
When I executed the code directly it was fine; when I ran the same code in docker, I would get socket hang up errors (ECONNRESET) if the payload was larger than some seemingly arbitrary, but not fixed, size.
In the end, the problem was that the MTU of the default docker bridge network didn't match the MTU of the host network.
I also tried --network=host, but I got the error:
"cannot run with network enabled in root network namespace: unknown"
which apparently no-one else has ever seen.
In the end, I followed the advice here on how to configure my docker bridge network MTU:
https://mlohr.com/docker-mtu/
"With the command ip link you can display the locally configured network cards and their MTU:"
"If the outgoing interface (in this case ens3) has an MTU smaller than 1500, some action is required. If it is greater than or equal to 1500, this problem does not apply to you."
I edited my /etc/docker/daemon.json file to look like this:
{
"runtimes": {
"runsc": {
"path": "/usr/bin/runsc"
}
},
"mtu": 1460
}
where 1460 was the MTU of the host network interface, and the problem was solved!
Hope this helps you or someone else; it was doing my head in.

Related

Cloud Run Readablestream takes 5 minutes to cancel enqueue

I created an adapter-node SvelteKit API endpoint which streams quotes using a ReadableStream. When I quit the client route, the streaming has to stop. This works fine in development using SvelteKit "npm run dev" (vite dev) or using a Windows desktop container (node build).
onDestroy(async () => {
  await reader.cancel(); // stop streaming
  controller.abort();    // signal fetch abort
});
But when I build and deploy the node container on Google Cloud Run, the streaming works fine, except when I quit the client route: the API endpoint keeps on streaming. The log shows enqueues for 5 more minutes, followed by a delayed ReadableStream cancel() on the API server.
Why is there a 5-minute gap between the client cancel/abort and the cancel() on the server?
The API +server.js
import { YahooFinanceTicker } from "yahoo-finance-ticker";

/** @type {import('./$types').RequestHandler} */
export async function POST({ request }) {
  const { logging, symbols } = await request.json();
  const controller = new AbortController();
  const ticker = new YahooFinanceTicker();
  ticker.setLogging(logging);
  if (logging) console.log("api ticker", symbols);

  const stream = new ReadableStream({
    start(controller) {
      (async () => {
        const tickerListener = await ticker.subscribe(symbols);
        tickerListener.on("ticker", (quote) => {
          if (logging) console.log("api", JSON.stringify(quote, ["id", "price", "changePercent"]));
          controller.enqueue(JSON.stringify(quote, ["id", "price", "changePercent"]));
        });
      })().catch(err => console.error(`api listen exception: ${err}`));
    },
    cancel() { // arrives after 5 minutes !!!
      console.log("api", "cancel: unsubscribe ticker and abort");
      ticker.unsubscribe();
      controller.abort();
    },
  });

  return new Response(stream, {
    headers: {
      'content-type': 'text/event-stream',
    }
  });
}
Route +page.svelte
const controller = new AbortController();
let reader = null;
const signal = controller.signal;

async function streaming(params) {
  try {
    const response = await fetch("/api/yahoo-finance-ticker", {
      method: "POST",
      body: JSON.stringify(params),
      headers: {
        "content-type": "application/json",
      },
      signal: signal,
    });
    const stream = response.body.pipeThrough(new TextDecoderStream("utf-8"));
    reader = stream.getReader();
    while (true) {
      const { value, done } = await reader.read();
      if (logging) console.log("resp", done, value);
      if (done) break;
      // ... and more to get the quotes
    }
  } catch (err) {
    if (!["AbortError"].includes(err.name)) throw err;
  }
}
...
The behavior you are observing is expected; Cloud Run does not support client-side disconnects yet.
It is mentioned in this article that:
Cloud Run (fully managed) currently only supports server-side streaming. Having only "server-side streaming" basically means when the "client" disconnects, "server" will not know about it and will carry on with the request. This happens because "server" is not connected directly to the "client" and the request from the "client" is buffered (in its entirety) and then sent to the "server".
You can also check this similar thread.
It is a known issue; a public issue already exists for it. You can follow that issue for future updates and also add your concerns there.
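Since the platform may never deliver the disconnect, a possible workaround is to give the stream its own server-side deadline so the ticker subscription cannot run forever. This is only a sketch, not part of the answer above; it reuses ticker and symbols from the question, and the 10-minute cap is an arbitrary example value:
// Minimal sketch, reusing `ticker` and `symbols` from the question above.
// The deadline is a safety net for platforms that never propagate cancel().
const stream = new ReadableStream({
  async start(controller) {
    const tickerListener = await ticker.subscribe(symbols);
    const deadline = setTimeout(() => {
      ticker.unsubscribe();  // stop the upstream feed ourselves
      controller.close();    // end the response even if cancel() never fires
    }, 1000 * 60 * 10);      // example: 10-minute hard cap
    tickerListener.on("ticker", (quote) => {
      try {
        controller.enqueue(JSON.stringify(quote, ["id", "price", "changePercent"]));
      } catch (err) {
        clearTimeout(deadline); // enqueue() throws once the stream is closed
        ticker.unsubscribe();
      }
    });
  },
  cancel() {
    ticker.unsubscribe();
  },
});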

Increase Headers Timeout in express

In order to access a Swagger UI based API, I wrote some code.
app.get('/getData', async (req, res) => {
  token = await getToken().then(res => { return res })

  async function getData() {
    return fetch(dataurl, {
      method: 'GET',
      headers: {
        accept: 'application/json;charset=UTF-8',
        authorization: 'Bearer ' + token.access_token
      }
    })
      .then(res => res.json())
      .catch(error => console.error('Error:', error));
  }

  const result = await getData().then(res => { return res })
  res.json(result)
})
The issue I have is that some requests will take about 10 minutes to finish since the data that gets accessed is very large and it just takes that time. I can't change that.
But after exactly 300 seconds I get "Headers Timeout Error" (UND_ERR_HEADERS_TIMEOUT).
I'm not sure where the 300 seconds come from. On the Swagger UI API the time is set to 600 seconds.
I think it's the standard timeout from express / NodeJS.
const port = 3000
const server = app.listen(port,()=>{ console.log('Server started')})
server.requestTimeout = 610000
server.headersTimeout = 610000
server.keepAliveTimeout = 600000
server.timeout = 600000
As you can see, I tried to increase all the timeouts for Express to about 600 seconds, but nothing changes.
I also changed the network.http.response.timeout in Firefox to 600 seconds.
But still after 300 seconds I get "Headers Timeout Error".
Can anybody tell me where and how I can increase the timeout so the request goes through?
Have you tried using the connect-timeout library?
npm install connect-timeout
//...
var timeout = require('connect-timeout');
app.use(timeout('600s'));
Read more here: https://www.npmjs.com/package/connect-timeout#examples
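Separately, the UND_ERR_HEADERS_TIMEOUT code comes from undici, the HTTP client behind Node's built-in fetch, and undici's default headersTimeout is 300 seconds, which would explain the 5-minute cutoff regardless of the Express server settings. Below is only a sketch of raising it, assuming Node 18+ where fetch(dataurl, ...) is the global fetch and an installed undici package compatible with the bundled one:
// Sketch only: raise undici's header/body timeouts for outgoing fetch calls.
// The server.* timeouts above govern incoming requests, not this outgoing one.
const { Agent, setGlobalDispatcher } = require('undici');

setGlobalDispatcher(new Agent({
  headersTimeout: 610000, // wait up to ~10 minutes for the response headers
  bodyTimeout: 610000,    // and allow long gaps while the body streams
}));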

Client Timeout to NodeJS on Docker in EC2 behind AWS ALB

I have a Koajs node app in a docker container on an EC2 instance. The app is behind an AWS Application Load Balancer.
The app simply takes a POSTed file and responds with a stream that the client can view events on.
So my server is doing the right thing (sending file data), and my client is doing the right thing (receiving file data and sending back progress), but the ALB is timing out. I don't understand why it's timing out. Both client and server are sending and receiving data to/from each other, so I would think that would qualify as keep alive traffic.
Here's the code that each is running.
Client:
const request = require('request-promise');
const fs = require('fs');

const filePath = './1Gfile.txt';
const file = fs.createReadStream(filePath);

(async () => {
  // PUT File
  request.put({
    uri: `http://server/test`,
    formData: { file },
    headers: { Connection: 'keep-alive' },
    timeout: 200000,
  })
    .on('data', (data) => {
      const progressString = data.toString();
      console.log({ progressString });
    });
})();
Server:
const { Readable } = require('stream');
const Koa = require('koa');
const router = require('koa-router')();

(async () => {
  const app = module.exports = new Koa();

  router.get('/healthcheck', async (ctx) => {
    ctx.status = 200;
  });
  router.put('/test', test);

  async function test(ctx) {
    const read = new Readable({
      objectMode: true,
      read() { },
    });
    ctx.body = read;

    let i = 1;
    setInterval(() => {
      read.push(`${process.hrtime()}, ${i}`);
      ctx.res.write('a');
      i++;
    }, 3000);
  }

  app.use(router.routes());
  app.use(router.allowedMethods());
  app.listen(3000, (err) => {
    if (err) throw err;
    console.info(`App started on port 3000 with environment localhost`);
  });
})();
Both server and client are logging the correct things, but the ALB just times out at whatever I set its idle timeout to. Is there some trick to tell the ALB that traffic is really flowing?
Thanks so much for any light you can shed on it.
Just a quick guess: you need to enable keep-alive when using request-promise. Add forever: true to the options. Try this:
request.put({
  uri: `http://server/test`,
  formData: { file },
  headers: { Connection: 'keep-alive' },
  timeout: 200000,
  forever: true,
})
We had a similar timeout issue when using request-promise-native and fixed it by adding this option. Hopefully it works out for you.
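For what it's worth, forever: true makes request reuse sockets via a keep-alive agent under the hood; a roughly equivalent sketch, reusing the placeholder URL and options from the client code above, passes an explicit agent instead:
// Sketch: roughly what `forever: true` enables, spelled out with a plain agent.
const http = require('http');

request.put({
  uri: `http://server/test`,
  formData: { file },
  timeout: 200000,
  agent: new http.Agent({ keepAlive: true }),
})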

Timeout handling with node.js stream piping

I'm piping an HTTPS request to a file; it works OK for 99.9% of calls, but occasionally (maybe when the server or network is not available) it hangs indefinitely...
This obviously cause my application to stop working and requiring a manual restart...
I have other HTTPS connections that used to occasionally hang but that always complete now, using the following error-handling code on the request object, as suggested in the Node documentation:
request.on('socket', function(socket) {
  socket.setTimeout(10000);
  socket.on('timeout', function() { request.abort(); });
});
request.on('error', function(e) {
  // Handle the error...
  console.error("FAILED!");
});
... but it seems that timeouts on the request are ignored if the destination is piped to a file stream. Maybe I should handle an error with a timeout on the file stream object, but the documentation is not clear whether there is an event I have to wait for other than 'finish'...
Here is the sample code, I hope someone can help me:
var https = require('https'),
    fs = require('fs');

var opts = {
  host: 'www.google.com',
  path: '/',
  method: 'GET',
  port: 443
};

var file = fs.createWriteStream('test.html');

var request = https.request(opts, function(response) {
  response.pipe(file);
  file.on('finish', function() {
    file.close(function() {
      console.log("OK!");
    });
  });
});

request.on('socket', function(socket) {
  socket.setTimeout(10000);
  socket.on('timeout', function() { request.abort(); });
});

request.on('error', function(e) {
  console.error("FAILED!");
});

request.end();
If you want to reproduce the hang, change the host and path to point at a huge file and disconnect the network cable during the transfer; it should time out after 10 seconds, but it doesn't...
I set up a demo node.js http server that sends a very slow answer and a client similar to your sample code.
When I start the client and then stop the server while it is sending the response, I also don't get a timeout event on the socket, but I do get an 'end' event on the response within the client:
var request = https.request(opts, function(response) {
  response.pipe(file);
  file.on('finish', function() {
    file.close(function() {
      console.log("OK!");
    });
  });
  response.on('end', function() {
    // this is printed when I stop the server
    console.log("response ended");
  });
});
Maybe you could listen to that event?
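If the 'end' event alone is not enough (for example when the transfer stalls without the connection ever closing), one complementary idea, offered only as a sketch and not part of the answer above, is an idle watchdog that is re-armed on every chunk and aborts the request when nothing arrives for 10 seconds:
// Sketch: detect a stalled transfer while piping to a file by re-arming a
// timer on every 'data' chunk; `request` and `file` are from the code above.
var watchdog;

function armWatchdog() {
  clearTimeout(watchdog);
  watchdog = setTimeout(function () {
    request.abort(); // give up on the stalled transfer
    file.close();
  }, 10000);
}

request.on('response', function (response) {
  armWatchdog();
  response.on('data', armWatchdog);
  response.on('end', function () { clearTimeout(watchdog); });
});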

Catching ECONNREFUSED in node.js with http.request?

I'm trying to catch ECONNREFUSED errors when using a HTTP client in node.js. I'm making requests like this:
var http = require('http');
var options = { host: 'localhost', port: '3301', path: '/', method: 'GET' };

http.request(options).on('response', function (res) {
  // do some stuff
});
I can't figure out how to catch this error:
Error: connect ECONNREFUSED
at errnoException (net.js:614:11)
at Object.afterConnect [as oncomplete] (net.js:605:18)
If I do request.on('error', function () {});, it doesn't catch it. If I do it like this:
var req = request.on(etc)
req.on('error', function blah () {});
Then I get TypeError: Object false has no method 'on'.
Do I really have to do a top-level uncaught error thing to deal with this? At the moment whatever I do my whole process quits out.
Edit: I found some blog posts on how to do it by creating a connection object, calling request on that, and then binding to errors on the connection object, but doesn't that make the entire http.request() shortcut useless?
Any reason you're not using http://nodejs.org/docs/v0.6.5/api/http.html#http.request as your base? Try this:
var req = http.request(options, function(res) {
  // Bind 'data', 'end' events here
});

req.on('error', function(error) {
  // Error handling here
});

req.end();
Each call to http.request() returns itself.
So try it like this...
http.request(options, function(){}).on('error', function(){}).end();
I've got a solution for this, having tried all the suggestions on this (and many other) pages.
My client needs to detect a turnkey product that runs embedded windows. The client is served from a different machine to the turnkey.
The turnkey can be in 3 states:
turned off
booted into windows, but not running the turnkey app
running the turnkey app
My client sends a 'find the turnkey product' GET message to my nodejs/express service, which then tries to find the turnkey product via http.request. The behavior for each of the 3 use cases is:
timeout
ECONNREFUSED - because the windows embedded phase of the turnkey is refusing connections
normal response to request (happy day scenario)
The code below handles all 3 scenarios. The trick to catching the ECONNREFUSED event was learning that its handler binds to the socket event.
var http = require('http');
var express = require('express');
var url = require('url');

function find (req, res) {
  var queryObj = url.parse(req.url, true).query;

  var options = {
    host: queryObj.ip, // client attaches ip address of turnkey to url.
    port: 1234,
    path: '/some/path',
  }; // http get options

  var badNews = function (e) {
    console.log(e.name + ' error: ', e.message);
    res.send({'ok': false, 'msg': e.message});
  }; // sends failure messages to log and client

  // instantiate http request object and fire it
  var msg = http.request(options, function (response) {
    var body = '';
    response.on('data', function (d) {
      body += d;
    }); // accumulate response chunks
    response.on('end', function () {
      res.send({'ok': true, 'msg': body});
      console.log('sent ok');
    }); // done receiving, send reply to client
    response.on('error', function (e) {
      badNews(e);
    }); // uh oh, send bad news to client
  });

  msg.on('socket', function (socket) {
    socket.setTimeout(2000, function () { // set short timeout so discovery fails fast
      var e = new Error('Timeout connecting to ' + queryObj.ip);
      e.name = 'Timeout';
      badNews(e);
      msg.abort(); // kill socket
    });
    socket.on('error', function (err) { // this catches ECONNREFUSED events
      badNews(err);
      msg.abort(); // kill socket
    });
  }); // handle connection events and errors

  msg.on('error', function (e) { // happens when we abort
    console.log(e);
  });

  msg.end();
}
For those not using DNS (you can also use request instead of get by simply replacing get with request like so: http.request({ ... })):
http.get({
  host: '127.0.0.1',
  port: 443,
  path: '/books?author=spongebob',
  auth: 'user:p#ssword#'
}, resp => {
  let data = '';
  resp.on('data', chunk => {
    data += chunk;
  });
  resp.on('end', () => console.log(data));
}).on('error', err => console.log(err));
