Acknowledge response sent too late; Node.js Express asynchronous response

I am using NodeJS with express.
I receive a request from an information publisher. I must send back an acknowledgement response as soon as I receive the request. Then I should process the request and push back a response.
According to their logs, I sent back the acknowledgement response too late, and also after the push of the real response. This is wrong and not what I want. I want to follow this model: the acknowledgement is sent first, then the real response is pushed.
So I have a router.js file with the following code:
app.post('/import/message.io', Importer.handleMessage);
In the importer controller the handleMessage function handles the request:
function handleMessage(req, res) {
    // DOMParser, parseSoapHeader, jobs and args are defined/required elsewhere in the controller
    let otaRequestBuffer = [];

    req.on('readable', () => {
        let data;
        while (data = req.read()) {
            otaRequestBuffer.push(data.toString());
        }
    });

    req.on('end', () => {
        let otaRequest = otaRequestBuffer.join('');
        try {
            let parser = new DOMParser();
            let xmlDoc = parser.parseFromString(otaRequest, "text/xml");
            let soapHeader = parseSoapHeader(xmlDoc);
            const soapAction = req.headers['soapaction'].toString();
            // Acknowledge the request; the soapHeader is used in here too, which is why this comes after the code above
            res.writeHead(200);
            res.write(
                `<?xml version='1.0' encoding='utf-8'?> ...
                `
            );
            res.end();
            // Kick off a new job by adding it to the work queue; the response is sent in the consumer.js of this job
            let job = jobs.create('worker-job', args);
            job.on('complete', function (result) {
                console.log('Job completed');
                // Other job.on code would be here
            });
        } catch (error) {
            console.log(error);
        } finally {
            console.log('DevInfo: Finally called here');
        }
    });
}
Other insights:
So the acknowledgement response is in the right format.
When I test locally, I receive the acknowledgement immediately.
The logs of the information publisher are here.
According to the logs, it seems like the actual response (line 5) is pushed to them before the acknowledgement response (line 1).
Is this possible with the code example I pasted above?
Thanks for the help.

It looks to be entirely possible that the real response (the push) is sent before the acknowledgement is received by the client. Since response.end() is asynchronous (i.e. the call returns before the response has actually been sent), we're effectively racing
res.end();
with
let job = jobs.create('worker-job', args);
This is the kind of annoying thing one tends to see only once one goes to production!
Perhaps you could try queuing your job once the end() callback is called. e.g.
res.end(() => {
    console.log("End callback, response stream is complete, starting job...");
    // Kick off a new job by adding it to the work queue; the response is sent in the consumer.js of this job
    let job = jobs.create('worker-job', args);
    job.on('complete', function (result) {
        console.log('Job completed');
        // Other job.on code would be here
    });
});
Since (from the Node.js docs)...
If callback is specified, it will be called when the response stream is finished.
https://nodejs.org/api/http.html#http_response_end_data_encoding_callback
(This is the method that Express wraps with response.end)
http://expressjs.com/en/5x/api.html#res.end
I'd also suggest logging detailed timestamps down to the millisecond on your server, so you can see exactly when calls are happening.
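For example, a minimal sketch of that kind of logging (the label strings are just placeholders):

// ISO timestamps include millisecond precision
console.log(`${new Date().toISOString()} writing acknowledgement`);
res.end(() => {
    console.log(`${new Date().toISOString()} acknowledgement stream finished`);
});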
Alternatively, delaying the job with setTimeout is certainly worth a try; it might get the results you wish:
// Set the delay as appropriate
const delayMs = 2000;

setTimeout(() => {
    console.log("Timeout complete, starting job...");
    // Kick off a new job by adding it to the work queue; the response is sent in the consumer.js of this job
    let job = jobs.create('worker-job', args);
    job.on('complete', function (result) {
        console.log('Job completed');
        // Other job.on code would be here
    });
}, delayMs);

Related

Websocket - Waiting for a http request callback to execute before next pusher event

So I'm working with websockets to process data from a website's API. For every new event I also send some HTTP requests back to the website in order to obtain more data. Up until now everything has worked fine, but now that I've started using async requests to speed things up, things got a bit different. My code used to process one event and then move on to the next one (these events come in extremely quickly, around 10 per second), but now it just seems to ignore the async (non-blocking) part, move on to the next event, and in doing so skip over half of the code. Note that the code works fine outside the Pusher. I'm using the 'pusher-client' module. My code looks like this:
var Request = require("request");
var requestSync = require('sync-request');
var Pusher = require('pusher-client');
var events_channel = pusher.subscribe('inventory_changes');
events_channel1.bind('listed', function(data)
{
var var2;
//Async request (to speed up the code)
function myFunction(callback){
request("url", function(error, response, body) {
if (!error && response.statusCode == 200)
{
result = JSON.stringify(JSON.parse(body));
return callback(null, result);
}
else
{
return callback(error, null);
}
});
}
myFunction(function(err, data){
if(!err)
{
var2 = data
return(data);
}
else
{
return(err);
}
});
//The part of the code below waits for the callback and the executes some code
var var1 = var2;
check();
function check()
{
if(var2 === var1)
{
setTimeout(check, 10);
return;
}
var1 = var2;
//A CHUNK OF CODE EXECUTES HERE (connected to the data from the callback)
}
});
In conclusion, the code works, but not inside the Pusher, because the Pusher skips over the asynchronous request. How would I make the Pusher wait for my async request to finish before processing the next event? If you happen to know, please let me know :)
You need to implement a queue to handle events one after another. I'm curious how it worked before; even without Pusher you'd have to implement some queue mechanism for it.
const eventsQueue = [];
let processingEvent = false;

events_channel.bind('listed', function (data) {
    eventsQueue.push(data);
    handleNewEvent();
});

function handleNewEvent() {
    if (processingEvent) return; // do nothing if already processing an event
    const eventData = eventsQueue.shift(); // pick the first element from the array
    if (!eventData) return; // all events are handled at the moment
    processingEvent = true; // set the flag only once there is an event to work on
    ... // handle the event data here
    processingEvent = false;
    handleNewEvent(); // handle the next event
}
Also, you should call clearTimeout to clear your timeout when you don't need it anymore.
And it's better to use promises or async/await instead of callbacks; your code will be much easier to read and maintain.
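For instance, a minimal sketch of the same queue with async/await, where handleEvent is a hypothetical stand-in for the event-handling code:

let processingEvent = false;

async function handleNewEvent() {
    if (processingEvent) return; // already draining the queue
    processingEvent = true;
    let eventData;
    // shift() returns undefined once the queue is empty
    while ((eventData = eventsQueue.shift())) {
        await handleEvent(eventData); // hypothetical async handler for one event
    }
    processingEvent = false;
}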

Node.js child process fork return response -- Cannot set headers after they are sent to the client

Situation:
I have a function that does an expensive operation, such as fetching a large query from MongoDB and then performing a lot of parsing and analysis on the response. I have offloaded this expensive operation to a child process fork, and I wait for the worker to be done before sending the response, in order not to block the main event loop.
Current implementation:
I have an API endpoint GET {{backend}}/api/missionHistory/flightSummary?days=90&token={{token}}
API entry point code:
missionReports.js
const cp = require('child_process');
// If reportChild is initialized here, "Can't send headers after they were sent"
const reportChild = cp.fork('workers/reportWorker.js');

exports.flightSummary = function (req, res) {
    let search_query = req.query;
    // If initialized here instead, there is no error.
    const reportChild = cp.fork('workers/reportWorker.js');
    logger.debug(search_query);
    let payload = {'task': 'flight_summary', 'search_params': search_query};
    reportChild.send(payload);
    reportChild.on('message', function (msg) {
        logger.info(msg);
        if (msg.worker_status === 'completed') {
            return res.json(msg.data);
        }
    });
};
worker code:
reportWorker.js
process.on('message', function (msg) {
    process.send({'worker_status': 'started'});
    console.log(msg);
    switch (msg.task) {
        case 'flight_summary':
            findFlightHours(msg.search_params, function (response) {
                logger.info('completed');
                process.send({'worker_status': 'completed', 'data': response});
            });
            break;
    }
});
Scenario 1: reportChild (fork) is initialized at the beginning of the module definitions. The API call works once and returns correct data. On the second call, it crashes with "cannot set headers after they've been sent". I stepped through the code, and it definitely only sends once per API call.
Scenario 2: if I initialize the reportChild inside of the API definition, it works perfectly every time. Why is that? Is the child forked process not killed unless it's redefined? Is this a standard implementation of child processes?
This is my first attempt at threading in Node.js; I am trying to move expensive operations off of the main event loop into different workers. Let me know what the best practice is for this situation. Thanks.
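A likely explanation, sketched here with the caveat that it is not confirmed in the thread: in scenario 1, the single module-level child accumulates one 'message' listener per API call, so a later worker message re-fires the listeners left over from earlier requests, which then call res.json on responses that have already been sent. Detaching each request's listener once it completes avoids that:

exports.flightSummary = function (req, res) {
    let search_query = req.query;
    let payload = {'task': 'flight_summary', 'search_params': search_query};

    function onMessage(msg) {
        if (msg.worker_status === 'completed') {
            // Detach this request's listener so it never fires for a later request
            reportChild.removeListener('message', onMessage);
            res.json(msg.data);
        }
    }

    reportChild.on('message', onMessage);
    reportChild.send(payload);
};

Overlapping requests could still receive each other's replies, so full concurrency would need a per-request correlation id on the messages (the same pattern sketched in the last question below).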

Getting JSON from a website in Node.js

So I'm fairly new to Node.js and am having trouble wrapping my head around asynchronous programming. I'm trying to get JSON from a website and pass it to a variable for use later. To test, I have been using this code:
var https = require("https");
var a;
function getter(url){
var request = https.get(url, function(response){
var body = "";
response.on("data", function(chunk){
body += chunk;
});
response.on("end", function(){
if(response.statusCode === 200){
try{
a = JSON.parse(body);
}catch(err){
console.log(err);
}
}
})
})
};
getter('https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY');
console.log(a);
When I run this, I get a as undefined, which seems to make sense from what I've read. But I'm unclear as to what to do from here. How would I go about passing this JSON into a variable?
https.get is asynchronous and executes the event handlers when the events occur. When you call getter(), the function returns immediately; it does not wait for the events, and the next statement, console.log(a), is executed.
Furthermore, JS is single-threaded, and the current execution stack is never interrupted for any event or callback. So the event handlers can only run once the current execution has come to an end, i.e. contains no more statements. Thus, your console.log() will always be executed before any event handler of the request, and a is still undefined.
If you want to continue after the request has finished, you have to do it from the event handler.
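For example, a minimal sketch that passes a callback into getter (the callback parameter is an addition, not part of the original code):

function getter(url, callback) {
    https.get(url, function (response) {
        var body = "";
        response.on("data", function (chunk) {
            body += chunk;
        });
        response.on("end", function () {
            if (response.statusCode === 200) {
                try {
                    callback(null, JSON.parse(body));
                } catch (err) {
                    callback(err);
                }
            }
        });
    });
}

getter('https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY', function (err, json) {
    if (err) return console.log(err);
    console.log(json); // the parsed JSON is only usable here, after "end" has fired
});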
See this excellent presentation for some more details https://youtu.be/8aGhZQkoFbQ

Creating promises in loop

I have to create promises in a loop according to a given config file and return the response when all are resolved. Here goes the code:
let promiseArray = [];

for (let type in spotlight) {
    switch (type) {
        case "outliers": {
            let ops = spotlight[type];
            for (let i = 0; i < ops.length; i++) {
                (function (op) {
                    let p = new Promise(function (resolve, reject) {
                        let reqUrl = urlCreator(op.uri, op.query);
                        //console.log("--------------------" + reqUrl);
                        apiService.get(reqUrl, function (isSuccess, data) {
                            if (!isSuccess) {
                                return reject(data);
                            }
                            // console.log(isSuccess);
                            // console.log(data);
                            // console.log("trend is ------" + JSON.stringify(op));
                            // create objects array
                            // let temp = [];
                            // let overallScore = data.overall.score;
                            // for (let day in overallScore) {
                            //     temp.push({"key": day, "value": parseFloat(overallScore[day])});
                            // }
                            // let outliers = stats.outliers(temp, "key", "value");
                            // NOTE: 'outliers' is only defined by the commented-out line above
                            resolve({"type": type, "name": op.name, "data": outliers});
                        });
                    });
                    promiseArray.push(p);
                }(ops[i]));
            }
            break;
        }
        case "filters": {
            let ops = spotlight[type];
            for (let i = 0; i < ops.length; i++) {
                (function (op) {
                    let p = new Promise(function (resolve, reject) {
                        let reqUrl = urlCreator(op.uri, op.query);
                        apiService.get(reqUrl, function (isSuccess, data) {
                            if (!isSuccess) {
                                return reject(data);
                            }
                            // console.log(isSuccess);
                            // console.log(data);
                            // console.log("coc is ------" + JSON.stringify(op));
                            resolve({"type": type, "name": op.name, "data": data});
                        });
                    });
                    promiseArray.push(p);
                }(ops[i]));
            }
            break;
        }
    }
}

Promise.all(promiseArray).then(values => {
    return res.json(values);
}, reason => {
    return res.json(reason);
}).catch(reason => {
    return res.json(reason);
});
The problem is that the promises never settle, neither resolved nor rejected. According to the config file, it has to hit two URLs, say U1 and U2. I tried to log the output to see which requests return. When the server is started and the very first request is made, U1 returns and the request hangs. On refresh I get responses from U2, U2 and the request hangs; then on refresh again U1, U1, and this continues. It seems to me that for some reason only one request is returned and the other sits in a buffer or something and comes back when the next request is made. Both requests are being made to the local server only; I am routing them externally just to make use of the cache, as the URL is being used as the key for the cache.
I tried using dummy URLs like facebook.com and google.com, and it works perfectly fine. Using one local URL and another like facebook.com also works, but when both URLs are of the local server, it gets stuck.
Does it have anything to do with the single-threaded nature of Node, or with using the same socket for making both requests?
PS: I am using npm request to make the URL calls.
Perhaps hesitating before making the second request would solve your problem. I've made some tools that could help with that; see the MacroQTools.js file at
https://github.com/J-Adrian-Zimmer/JavascriptPromisesClarified.git
You're defining the request callback as function(success, data), while request consumes error-first callbacks, defined like function(error, response).
You're calling request like:
apiService.get(reqUrl, function (isSuccess, data) {
    if (!isSuccess) {
        return reject(data);
    }
    // console.log(isSuccess);
    // console.log(data);
    // console.log("coc is ------" + JSON.stringify(op));
    resolve({"type": type, "name": op.name, "data": data});
});
This pretends that, if the first parameter is missing, you have to reject with the second parameter, data. Really, it would be something like:
apiService.get(reqUrl, function (err, data) {
    if (err) {
        reject(err);
    } else {
        // console.log(isSuccess);
        // console.log(data);
        // console.log("coc is ------" + JSON.stringify(op));
        resolve({"type": type, "name": op.name, "data": data});
    }
});
Since request expects error-first callbacks (like almost anything in node that takes a callback).
So, when the requests actually work as expected, your code must actually be rejecting the promises with the real response value: when the request succeeds, isSuccess is null and data holds the real response.
This is surely breaking something and is not good, though fixing it alone may not solve your issue completely: I believe your requests are acting weird because of some configuration problem in your API, not just because you're rejecting promises when requests are successful (that would just send the data as the rejection reason).
Also, you're handling the rejection of Promise.all() twice: passing a second handler to then and calling catch again. Only one is needed, and .catch(handler) is probably the better choice.
I made a small working example of how you can use Promise.all to collect async requests. I used imdb as the apiService, but any async HTTP service would work too. I didn't reproduce your code exactly, but I'm sure you can adapt this to make your code work, at least the part that just consumes HTTP services.
var express = require('express');
var app = express();
var Promise = require('bluebird');
var imdb = require('imdb-api');

app.get('/', controllerHandler);

app.listen(3000, function () {
    console.log('Example app listening on port 3000!');
});

var apiService = {};
apiService.get = imdb.getReq;

function controllerHandler(request, response) {
    // Like iterating through spotlight.type and returning an array of promises from it.
    // In this case the array is from films, and Airbag is obviously the best of them.
    var promises = [{name: 'The Matrix'}, {name: 'Avatar'}, {name: 'Airbag'}].map(createPromise);
    // Use either .catch(errorHandler) or .then(successHandler, errorHandler). The former is better:
    Promise.all(promises).then(successHandler).catch(errorHandler);

    function successHandler(result) {
        return response.json(result);
    }

    function errorHandler(reason) {
        console.log('There was an error calling the service:');
        console.log(reason);
        return response.send('there was an error');
    }
}

function createPromise(film) {
    return new Promise(function (resolve, reject) {
        apiService.get(film, function (err, data) {
            if (err)
                reject(new Error(err));
            else
                resolve({title: data.title, year: data.year});
        });
    });
}

Responding to a callback from a child message in node.js

I've run into a problem with Node.js and can't figure out the correct way to handle this situation.
I have a worker process that handles all the data for a leaderboard. When a request comes in for the leaderboard, I send the request to the worker to handle. The worker sends back the response via child_process messaging.
My problem is how to efficiently get the response to the callback. This is my first attempt, but it won't work, as I'm always rebinding the 'message' event to a different callback.
Manager.setup_worker = function () {
    Manager.worker = require('child_process').fork("./workers/leaderboard");
};

Manager.process_request = function (request, callback) {
    Manager.worker.on("message", function (response) {
        callback(response);
    });
    Manager.worker.send(request);
};
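One common way around the rebinding problem, sketched under the assumption that the worker copies an id field from each request onto its reply: tag each request with an id, keep the pending callbacks in a map, and bind a single 'message' listener that dispatches by id.

var pending = {}; // request id -> waiting callback
var nextId = 0;

Manager.setup_worker = function () {
    Manager.worker = require('child_process').fork("./workers/leaderboard");
    // Bind the 'message' listener exactly once and route replies by id
    Manager.worker.on("message", function (response) {
        var callback = pending[response.id];
        if (callback) {
            delete pending[response.id];
            callback(response);
        }
    });
};

Manager.process_request = function (request, callback) {
    request.id = ++nextId; // assumes the worker echoes id back on its reply
    pending[request.id] = callback;
    Manager.worker.send(request);
};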
