I have a Koajs node app in a docker container on an EC2 instance. The app is behind an AWS Application Load Balancer.
The app simply takes an uploaded file and responds with a stream that the client can watch progress events on.
So my client is doing the right thing (sending the file) and my server is doing the right thing (receiving it and sending back progress), but the ALB is timing out. I don't understand why it's timing out: both client and server are sending and receiving data to/from each other, so I would think that would qualify as keep-alive traffic.
Here's the code that each is running.
Client:
const request = require('request-promise');
const fs = require('fs');

const filePath = './1Gfile.txt';
const file = fs.createReadStream(filePath);

(async () => {
  // PUT the file and listen for progress events on the response stream
  request.put({
    uri: `http://server/test`,
    formData: { file },
    headers: { Connection: 'keep-alive' },
    timeout: 200000,
  })
    .on('data', (data) => {
      const progressString = data.toString();
      console.log({ progressString });
    });
})();
Server:
const { Readable } = require('stream');
const Koa = require('koa');
const router = require('koa-router')();

(async () => {
  const app = module.exports = new Koa();

  router.get('/healthcheck', async (ctx) => {
    ctx.status = 200;
  });

  router.put('/test', test);

  async function test(ctx) {
    const read = new Readable({
      objectMode: true,
      read() { },
    });
    ctx.body = read;
    let i = 1;
    // Push a progress event every 3 seconds for as long as the request is open
    setInterval(() => {
      read.push(`${process.hrtime()}, ${i}`);
      ctx.res.write('a');
      i++;
    }, 3000);
  }

  app.use(router.routes());
  app.use(router.allowedMethods());

  app.listen(3000, (err) => {
    if (err) throw err;
    console.info(`App started on port 3000 with environment localhost`);
  });
})();
Both server and client are logging the correct things, but the ALB just times out at whatever I set its idle timeout to. Is there some trick to tell the ALB that traffic is really flowing?
Thanks so much for any light you can shed on it.
Just a quick guess: you need to enable keep-alive when using request-promise. Add forever: true to the options. Try this:
request.put({
  uri: `http://server/test`,
  formData: { file },
  headers: { Connection: 'keep-alive' },
  timeout: 200000,
  forever: true,
})
We had a similar timeout issue when using request-promise-native, and we fixed it by adding this option. Hopefully it works out for you.
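If forever: true alone doesn't do it, a roughly equivalent sketch (untested against this setup) is to hand request an explicit keep-alive agent via its agent option:

const http = require('http');

// Untested sketch: reuse one keep-alive agent for the upload instead of
// (or in addition to) the forever flag. `agent` is a standard request option.
const keepAliveAgent = new http.Agent({ keepAlive: true, keepAliveMsecs: 30000 });

request.put({
  uri: `http://server/test`,
  formData: { file },
  agent: keepAliveAgent,
  timeout: 200000,
})
  .on('data', (data) => {
    console.log(data.toString());
  });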
Related
I really can't find an answer on the internet, so I want to ask here. My Socket.IO setup works, since I can display data from the backend on the first load of my page, but when I add new data I still have to refresh the page for the frontend to update, so it's not yet real time. I use an Express router / REST API to add new data to the database. I also want to ask: why do I have to add { transports: ['websocket', 'polling', 'flashsocket'] } on my frontend? I see others do it without the transports option, but when I leave it out, a CORS error occurs. Thank you!
This is my React JS code in the file App.js. (I didn't include the const App = () => {....} wrapper, but the state and useEffect are inside that const App.)
import io from "socket.io-client";

const socket = io("http://localhost:3001", { transports: ['websocket', 'polling', 'flashsocket'] });

const [rooms, setRooms] = useState([]);

useEffect(() => {
  socket.emit("rooms");
  socket.on("rooms", rooms => {
    setRooms(rooms);
  })
}, [])
This is my app.js (node/express)
const http = require('http').createServer(app);
const io = require("socket.io")(http);
const viewRooms = require("./events/rooms");

const onConnection = (socket) => {
  viewRooms(io, socket);
}

io.on("connection", onConnection);
This is the rooms.js file in my events folder
const Rooms = require("./../models/Rooms");

module.exports = (io, socket) => {
  const view = () => {
    Rooms.find()
      .then(rooms => {
        io.emit("rooms", rooms);
      })
  }
  socket.on("rooms", view);
}
The submit function I use to add data to the database
const submitHandle = (e) => {
  e.preventDefault();
  const formData = new FormData();
  formData.append('name', addForm.name);
  addForm.description.forEach((val, key) => {
    formData.append("article[" + key + "]" + "[" + Object.keys(val) + "]", val.paragraph);
  })
  addForm.images.forEach(val => {
    formData.append("image", val);
  })
  formData.append('date', addForm.date);
  let token = "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjYwNDU2ZmNhNTI3ZTdhMGEwODY0NjVjNSIsImlhdCI6MTYxODI3NzIyNX0.w6eBHJC72xo-NRPtzJ3gKu_hIY70eCk_-K3-pkO4bAc";
  fetch("http://localhost:3001/rooms/upload", {
    method: "POST",
    body: formData,
    headers: {
      "Authorization": token
    }
  })
    .then(data => data.json())
    .then(rooms => {
      console.log(rooms);
      alert(rooms.message);
    })
}
And this is the code for my Express route (POST)
const router = require("express").Router();
const Rooms = require("./../models/Rooms");
const auth = require("./../authorization");
const passport = require("passport");
const multer = require("multer");
require("./../passport-setup");

// IMAGE DESTINATION AND FILENAME
const storage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, "public/images")
  },
  filename: (req, file, cb) => {
    cb(null, Date.now() + "-" + file.originalname)
  }
})

// upload
const upload = multer({ storage: storage });

// upload rooms
router.post("/upload", upload.array("image", 10), passport.authenticate("jwt", { session: false }), auth, (req, res, next) => {
  let allDescription = req.body.description;
  req.body.images = req.files.map(file => {
    return (
      {
        roomName: req.body.name,
        image: "/public/" + file.filename
      }
    )
  })
  Rooms.create(req.body)
    .then(rooms => {
      res.send({
        message: "Rooms uploaded!",
        success: true,
        rooms
      })
    })
    .catch(next);
})
For the first part of your question, we'd have to see a lot more logging info to know exactly what is going on. It seems possible that your client-side socket.emit("rooms") is happening too early, before your Socket.IO connection is fully established and perhaps before the server is ready for the incoming request. You can fully log every single Socket.IO event and probably determine what's going on.
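For example, a minimal client-side sketch (using the same socket and event names as in the question) that defers the emit until the connection is actually established:

// Ask for the room list only after the socket has connected,
// and again on every reconnect.
socket.on("connect", () => {
  socket.emit("rooms");
});

socket.on("rooms", rooms => {
  setRooms(rooms);
});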
And I also want to ask, why I have to add { transports: ['websocket', 'polling', 'flashsocket'] } on my frontend?
If you don't list websocket as the first transport, then socket.io starts with a few HTTP polling requests, and those HTTP requests are subject to CORS. The webSocket transport is not subject to CORS, so if you force socket.io to use it first, then there are no CORS issues. FYI, the HTTP polling that socket.io uses is only really there to detect situations where the webSocket transport is not supported or is blocked. If you aren't concerned about that, then you can really just do this in the front end:
{ transports: ['websocket'] }
Or, you could remove the transports option completely from the client and then implement support for CORS on your server so that the CORS polling requests will be allowed.
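For example, a minimal sketch of the server side, assuming Socket.IO v3 or later and a React dev server at http://localhost:3000 (adjust the origin to wherever your frontend is actually served from):

// Allow the cross-origin polling handshake from the React dev server.
// Sketch only; the `cors` option shape shown here is the Socket.IO v3+ form.
const io = require("socket.io")(http, {
  cors: {
    origin: "http://localhost:3000",
    methods: ["GET", "POST"]
  }
});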
I am working on creating a zip of multiple files on the server and streaming it to the client while it is being created. Initially I was using ArchiverJS; it worked fine when I appended buffers to it, but it failed when I needed to add streams. Then, after some discussion on GitHub, I switched to Node zip-stream, which started working fine, thanks to jntesteves. But as I deployed the code to GKE (Kubernetes), I started getting "network failed" errors for huge files.
Here is my sample code:
const ZipStream = require("zip-stream");
const https = require("https");
const http = require("http");
const request = require("request");

/**
 * @summary Adds a readable stream provided by the https module into zipStreamer using the entry method
 */
const handleEntryCB = ({ readableStream, zipStreamer, fileName, resolve }) => {
  readableStream.on("error", error => {
    console.error("Error while listening readableStream : ", error);
    resolve("done");
  });
  zipStreamer.entry(readableStream, { name: fileName }, error => {
    if (!error) {
      resolve("done");
    } else {
      console.error("Error while listening zipStream readableStream : ", error);
      resolve("done");
    }
  });
};

/**
 * @summary Handles downloading of files using the native https, http and request modules
 */
const handleUrl = ({ elem, zipStreamer }) => {
  return new Promise((resolve, reject) => {
    let fileName = elem.fileName;
    const url = elem.url;
    // Used in most of the cases
    if (url.startsWith("https")) {
      https.get(url, readableStream => {
        handleEntryCB({ readableStream, zipStreamer, url, fileName, resolve, reject });
      });
    } else if (url.startsWith("http")) {
      http.get(url, readableStream => {
        handleEntryCB({ readableStream, zipStreamer, url, fileName, resolve, reject });
      });
    } else {
      const readableStream = request(url);
      handleEntryCB({ readableStream, zipStreamer, url, fileName, resolve, reject });
    }
  });
};
const downloadZipFile = async (data, resp) => {
  let { urls = [] } = data || {};
  if (!urls.length) {
    throw new Error("URLs are mandatory.");
  }
  // Output zip name
  const outputFileName = `Test items.zip`;
  console.log("Downloading using streams.");
  // Initialize zip-stream instance
  const zipStreamer = new ZipStream();
  // Set headers on the response
  resp.writeHead(200, {
    "Content-Type": "application/zip",
    "Content-Disposition": `attachment; filename="${outputFileName}"`,
    "Access-Control-Allow-Origin": "*",
    "Access-Control-Allow-Methods": "GET, POST, OPTIONS"
  });
  // Pipe zipStreamer to the resp so that the client starts getting the response
  // as soon as the first chunk is added to the zipStreamer
  zipStreamer.pipe(resp);
  for (const elem of urls) {
    await handleUrl({ elem, zipStreamer });
  }
  zipStreamer.finish();
};

app.post(restPrefix + "/downloadFIle", async (req, resp) => {
  try {
    const { data } = req.body || {};
    // await so that errors thrown inside the async function reach the catch below
    await downloadZipFile(data, resp);
  } catch (error) {
    console.error("[FileBundler] unknown error : ", error);
    if (resp.headersSent) {
      resp.end("Unknown error while archiving.");
    } else {
      resp.status(500).end("Unknown error while archiving.");
    }
  }
});
I tested with 7-8 files of ~4.5 GB each locally and it works fine, but when I tried the same on Google Kubernetes Engine, I got a network failed error.
After some more research I increased the server timeout on k8s to 3000 seconds, and then it started working fine, but I guess increasing the timeout is not a good solution.
Is there anything I am missing at the code level, or can you suggest a good GKE deployment configuration for a server that downloads large files with many concurrent users?
I have been stuck on this for the past 1.5+ months. Please help!
Edit 1: I edited the timeout in the ingress, i.e. Network services -> Load balancing -> edit the timeout in the service.
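Besides the ingress timeout, it may be worth checking Node's own HTTP server timeouts, which can also cut long downloads short. A minimal sketch, assuming the Express app from the snippet above is started with app.listen (the numbers are placeholders, not recommendations):

// Hypothetical startup tweak: relax Node's built-in timeouts so a
// long-running zip download is not cut off by the HTTP server itself.
const server = app.listen(3000);

// 0 disables the per-socket inactivity timeout.
server.setTimeout(0);

// Keep idle keep-alive sockets slightly longer than the load balancer's
// idle timeout to avoid races when connections are reused.
server.keepAliveTimeout = 65 * 1000;
server.headersTimeout = 66 * 1000;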
I picked up some old stream code recently (written when 8.x was LTS) and attempted to update it to 12.x. This led to an interesting break in the way I dealt with ENOENT file errors.
Here's a simplification:
const { createServer } = require('http')
const { createReadStream } = require('fs')

const PORT = 3000

const server = createServer((req, res) => {
  res.writeHead(200, {
    'Content-Type': 'application/json'
  })
  const stream = createReadStream(`not-here.json`, {encoding: 'utf8'})
  stream.on('error', err => {
    stream.push(JSON.stringify({data: [1,2,3,4,5]}))
    stream.push(null)
  })
  stream.pipe(res)
})

server.listen(PORT)
server.on('listening', () => {
  console.log(`Server running at http://localhost:${PORT}/`)
})
In Node 8, the above code works fine. I'm able to intercept the error, write something to the stream and let it close normally.
In Node 10+ (tested 10, 12, and 13) the stream is already destroyed when my error callback is called. I can't push new things on the stream and handle the error gracefully for the client side.
Was this an intentional change, and can I still handle this error in a nice way for the client side?
One possibility: open the file yourself and only create the stream from an already successfully opened file descriptor. That will allow you to handle ENOENT (or any other error upon opening the file) before you get into the messy stream error handling mechanics. The stream architecture seems most aligned with aborting upon error, not recovering with some alternate behavior.
const { createServer } = require('http');
const fs = require('fs');

const PORT = 3000;

const server = createServer((req, res) => {
  res.writeHead(200, {'Content-Type': 'application/json'});
  fs.open('not-here.json', 'r', (err, fd) => {
    if (err) {
      // send alternative response here
      res.end(JSON.stringify({data: [1,2,3,4,5]}));
    } else {
      const stream = fs.createReadStream(null, {fd, encoding: 'utf8'});
      stream.pipe(res);
    }
  });
});
server.listen(PORT);
server.on('listening', () => {
console.log(`Server running at http://localhost:${PORT}/`)
});
You could also try experimenting with the autoDestroy or autoClose options on your stream to see if any of those flags will allow the stream to stay open so you can still push data into it, even if the file produced an error when opening or reading. The doc on those flags is not very complete, so some combination of programming experiments and studying the code would be required to see if they can be manipulated to still add data to the stream after your stream got an error.
The answer by jfriend00 pointed me in the right direction.
Here are two different ways I solved this. I wanted a function that returned a stream rather than handle the error in the req handler function. This is more like what I'm actually doing in real code.
Handling error from stream:
Just like above except I took care to manually destroy the stream. Does this correctly take care of the internal file descriptor? I think it does.
const server = createServer((req, res) => {
  res.writeHead(200, {
    'Content-Type': 'application/json'
  })
  getStream().pipe(res)
})

function getStream() {
  const stream = createReadStream(`not-here.json`, {
    autoClose: false,
    encoding: 'utf8'
  })
  stream.on('error', err => {
    // handling "no such file" errors
    if (err.code === 'ENOENT') {
      // push JSON data to stream
      stream.push(JSON.stringify({data: [1,2,3,4,5]}))
      // signal the end of stream
      stream.push(null)
    }
    // destroy/close the stream regardless of error
    stream.destroy()
    console.error(err)
  })
  return stream
}
Handling the error during file open:
Like jfriend00 suggests.
const { createServer } = require('http')
const { promisify } = require('util')
const { Readable } = require('stream')
const { open, createReadStream } = require('fs')

const openAsync = promisify(open)

const server = createServer(async (req, res) => {
  res.writeHead(200, {
    'Content-Type': 'application/json'
  })
  const stream = await getStream()
  stream.pipe(res)
})

async function getStream() {
  try {
    const fd = await openAsync(`not-here.json`)
    return createReadStream(null, {fd, encoding: 'utf8'})
  } catch (error) {
    console.log(error)
    // set up a new stream
    const stream = new Readable()
    // push JSON data to stream
    stream.push(JSON.stringify({data: [1,2,3,4,5]}))
    // signal the end of stream
    stream.push(null)
    return stream
  }
}
I still like handling in the stream better but would love to hear reasons why you might do it one way or the other.
I am working on creating an API gateway, and so far I can get everything to work for single instances where I hard-code the IP and port of the server I want my API gateway to connect to.
I have seen a few examples of how to accomplish service discovery with Consul and Node.js.
I did find this one and have tried to make it work, but I can't get the watcher to work right and hand me the IP:port combination I need to connect to the service.
All of the below files are just parts and not the whole thing. The important parts are there though.
This is my app.js file
app.listen(port, () => {
  const CONSUL_ID = require('uuid').v4();
  const ip = require('ip');
  const my_IP = ip.address();

  let options = {
    name: 'api-gateway',
    address: `${my_IP}`,
    port: 8080,
    id: CONSUL_ID,
    check: {
      ttl: '10s',
      deregister_critical_service_after: '1m'
    }
  };

  consul.agent.service.register(options, function(err) {
    if (err) throw err;
    console.log(`Registered service with ID of ${CONSUL_ID}`);
  });

  setInterval(() => {
    consul.agent.check.pass({id: `service:${CONSUL_ID}`}, err => {
      if (err) throw new Error(err);
    });
  }, 5 * 1000);

  process.on('SIGINT', () => {
    console.log(`SIGINT. De-Registering service with ID of ${CONSUL_ID}`);
    consul.agent.service.deregister(CONSUL_ID, (err) => {
      if (err) console.log(`Error de-registering service from consul, with error of : ${err}`);
      if (!err) console.log(`De-registered service with ID of ${CONSUL_ID}`);
      process.exit();
    });
  });

  console.log(`API gateway server running express started on port ${port}.`);
});
I have two other test services running; the only difference between my app.js above and those test ones is that they use name: 'test-service' for the service name.
This is my routes file
var consul = require("consul")({host: '10.0.1.248'});
var known_data_instances = [];
const apiAdapter = require('./apiAdapter')

// Keep a list of healthy services
var watcher = consul.watch({
  method: consul.health.service,
  options: {
    service: 'test-service',
    passing: true
  }
});

watcher.on('change', data => {
  console.log('received discovery update:', data.length);
  known_data_instances = [];
  data.forEach(entry => {
    known_data_instances.push(`http://${entry.Service.Address}:${entry.Service.Port}/`);
  });
});

watcher.on('error', err => {
  console.error('watch error', err);
});

const BASE_URL = known_data_instances[Math.floor(Math.random() * known_data_instances.length)];
// const api = apiAdapter(BASE_URL + ':8081');
const api = apiAdapter(BASE_URL);

// router.get('/login', isAuthorized, (req, res) => {
router.get('/login', (req, res) => {
  api.get(req.path).then(resp => {
    res.send(resp.data);
  });
});
API adapter file
const axios = require('axios');

module.exports = (baseURL) => {
  return axios.create({
    baseURL: baseURL,
  });
};
I am expecting to be able to send a request using axios to an HTTP endpoint of a different service on a different server, using the IP and port obtained from Consul.
Actual result is:
(node:3230) UnhandledPromiseRejectionWarning: Error: connect ECONNREFUSED 127.0.0.1:80
at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1191:14)
It seems to me like it is not picking up the URLs and is defaulting back to localhost port 80.
I'm a bit lost on all of this since I have never built an application with Consul, and while the consul.io docs are great, there just isn't much of a guide base online to make this easy to learn.
I guess the question comes down to: how do I get my API adapter to make requests to my different services, in this case test-service?
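For what it's worth, here is a sketch (an assumption on my part, not a verified fix) of resolving the instance at request time instead of once at module load; as written above, BASE_URL is computed before the watcher's first 'change' event, when known_data_instances is still empty:

// Pick a healthy instance per request instead of once at startup.
// Uses the same known_data_instances array and apiAdapter as above.
function getBaseUrl() {
  if (!known_data_instances.length) {
    return null; // nothing discovered yet
  }
  return known_data_instances[Math.floor(Math.random() * known_data_instances.length)];
}

router.get('/login', (req, res) => {
  const baseUrl = getBaseUrl();
  if (!baseUrl) {
    return res.status(503).send('No healthy test-service instance available yet.');
  }
  apiAdapter(baseUrl).get(req.path).then(resp => {
    res.send(resp.data);
  });
});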
Regular client-initiated requests to the Node server are captured fine in Fiddler. However, requests sent from Node to a web service are not captured. Passing proxy config (127.0.0.1:8888) to the request method did not help. How can I route these request messages through Fiddler?
var http = require('http');
var request = require('request');

request.get(webserviceURL, {
  "auth": { "user": "user", "pass": "pass", sendImmediately: true },
  "proxy": { "host": "127.0.0.1", "port": 8888 }
}, function (error, response) {
  console.log("response received");
});
Request repo: https://github.com/mikeal/request
I just tried to do this myself (using Fiddler and the request library from npm). Here's how I got it working:
process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = '0'; // Ignore 'UNABLE_TO_VERIFY_LEAF_SIGNATURE' authorization error

// Issue the request
request(
  {
    method: "GET",
    uri: "https://secure.somewebsite.com/",
    proxy: "http://127.0.0.1:8888" // Note the fully-qualified path to the Fiddler proxy. No "https" is required, even for https connections to the outside.
  },
  function(err, response, body) {
    console.log("done");
  });
This is with Fiddler2 using the default port and proxy options (and no proxy authentication).
Fiddler works by setting your "Internet Options" (from the Start menu) "Connections" > "LAN Settings" > "Proxy Server" to its own port, thus making all HTTP traffic (from clients that obey this setting) go through it.
You should point your Node.js client library at a proxy; the settings to use are shown in that options dialog after you start Fiddler.
The proxy option should be a full url, like this:
proxy : "http://127.0.0.1:8888"
To do this on an ad-hoc basis, without changing your code, you can use environment variables.
Request respects:
HTTP_PROXY
HTTPS_PROXY
NO_PROXY
So, to proxy, just set these in your console before running your process.
For example, to set up an HTTP and HTTPS proxy, use:
set HTTP_PROXY="http://127.0.0.1:8888"
set HTTPS_PROXY="http://127.0.0.1:8888"
set NODE_TLS_REJECT_UNAUTHORIZED=0
The last line avoids SSL issues when going through the Fiddler proxy.
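The same effect can also be had from code instead of the shell (a sketch; it relies on request reading these environment variables, as described above), as long as the variables are set before any request is issued:

// Set the proxy environment variables before issuing any requests;
// request falls back to them when no explicit `proxy` option is given.
process.env.HTTP_PROXY = 'http://127.0.0.1:8888';
process.env.HTTPS_PROXY = 'http://127.0.0.1:8888';
process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0'; // avoid SSL errors through the Fiddler proxy

const request = require('request');
request.get('https://secure.somewebsite.com/', (err, response, body) => {
  console.log('done');
});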
I've been wanting the same thing: an equivalent of the Network tab in Chrome DevTools, only for Node.js. Unfortunately, it doesn't appear that one exists. I don't have Fiddler on macOS, so this is how I went about stubbing the require('http') methods to log and pass through. Leaving this here in case I need it again or someone else finds it helpful. You can turn it on by attaching a debugger and calling require('filename')() on the file containing this script.
module.exports = () => {
  const http = require('http');
  http._request = http.request;
  global.DO_LOG_AJAX = true;
  const log = str => {
    if (global.DO_LOG_AJAX) {
      console.debug(str);
    }
  };
  const flushLog = (requestLines, responseLines) => {
    if (global.DO_LOG_AJAX) {
      log([
        '----------------Begin Request-----------------------------------',
        ...requestLines,
        '----------------End Request / Begin Response--------------------',
        ...responseLines,
        '----------------End Response------------------------------------',
      ].join('\n'));
    }
  };
  let write;
  let end;
  http.request = (...requestParams) => {
    const req = http._request(...requestParams);
    const { method, path, headers, host, port } = requestParams[0];
    const requestLogLines = [];
    requestLogLines.push(`${method} ${path}`);
    requestLogLines.push(`Host: ${host}:${port}`);
    for (const header of Object.keys(headers)) {
      requestLogLines.push(`${header}: ${headers[header]}`);
    }
    write = write || req.write;
    end = end || req.end;
    req.on('error', err => {
      log({ err });
    });
    req._write = write;
    req._end = end;
    const requestBody = [];
    req.write = (...writeParams) => {
      requestBody.push(writeParams[0].toString());
      return req._write(...writeParams);
    };
    req.end = (...endParams) => {
      if (endParams[0]) {
        requestBody.push(endParams[0].toString());
      }
      requestLogLines.push('');
      requestLogLines.push(requestBody.join(''));
      return req._end(...endParams);
    };
    const responseLogLines = [];
    req.once('response', response => {
      const responseBody = [];
      responseLogLines.push(`${response.statusCode} ${response.statusMessage}`);
      for (const header of Object.keys(response.headers)) {
        responseLogLines.push(`${header}: ${response.headers[header]}`);
      }
      const onData = chunk => {
        responseBody.push(chunk.toString());
      };
      const onClose = err => {
        responseLogLines.push('');
        responseLogLines.push(responseBody.join(''));
        // 'close' may fire without an error argument, so only log one if present
        if (err) {
          responseLogLines.push('');
          responseLogLines.push(`--- ERROR --- ${err.toString()}`);
        }
        flushLog(requestLogLines, responseLogLines);
        // the data listener lives on the response, not the request
        response.removeListener('data', onData);
      };
      const onEnd = () => {
        responseLogLines.push('');
        responseLogLines.push(responseBody.join(''));
        flushLog(requestLogLines, responseLogLines);
        // detach both listeners so a trailing 'close' doesn't log a second time
        response.removeListener('data', onData);
        response.removeListener('close', onClose);
      };
      response.on('data', onData);
      response.once('close', onClose);
      response.once('end', onEnd);
    });
    return req;
  };
};
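For example, assuming the module above is saved as http-logger.js (a hypothetical filename), it could be switched on like this:

// Enable the logger, then every call to http.request made by this process is logged.
// './http-logger' is a hypothetical path to the module above.
require('./http-logger')();

const http = require('http');
const req = http.request({
  host: 'example.com',
  port: 80,
  path: '/',
  method: 'GET',
  headers: { Accept: 'text/html' },
});
req.on('response', res => res.resume());
req.end();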