Because of an error that I had in the client, my Node/Express API was getting the exact same PUT request twice, simultaneously, every time a form was submitted.
In response, the server always crashed with the following error:
Error: Can't set headers after they are sent
Now that I have fixed the client, the server no longer crashes, but I'd like to fix the API properly: anyone could kill my server just by sending two PUT requests simultaneously.
So my questions are:
How do I best handle the "can't set headers" error with Express?
What is the mistake in my server code that allowed this error to happen in the first place?
Here is my server code:
server.js
router.route('/deals/:id').put(deal.update);
deal.js
var r = {};

exports.update = function(req, res) {
  r = res;
  var u = {
    $set: {}
  };
  for (var x in req.body) {
    if (['name', 'stages'].indexOf(x) >= 0)
      u.$set[x] = req.body[x];
  }
  Pipeline // this is a mongoose object
    .findOneAndUpdate({ '_id.id': req.params.id }, u, { new: true })
    .exec(respond);
};

function respond(err, data) {
  if (err) {
    return r.send(err);
  } else {
    return r.json(data);
  }
}
UPDATE:
The error came from the fact that I declared a global r so that a single respond callback could be shared by all of my methods.
I fixed it by giving each method its own respond callback.
I'm still interested to know whether I could get away with a bind(res) instead.
You missed a return, so when there was an error you called res.send twice. With the return, execution stops after the first call.
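For clarity, here is a minimal sketch of the pattern that triggers the error when the return is missing (illustrative, not your exact code):

function respond(err, data) {
  if (err) {
    res.send(err);   // without a return, execution falls through...
  }
  res.json(data);    // ...and a second response is attempted: "Can't set headers after they are sent"
}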
I see what you are trying to do in your follow-on question, but a global variable r is definitely not the way to go. The way I handle patterns like this is to pass res on to the follow-on function; see below. There may be a better way, but this is at least legitimate.
exports.update = function(req, res) {
  var u = {
    $set: {}
  };
  for (var x in req.body) {
    if (['name', 'stages'].indexOf(x) >= 0)
      u.$set[x] = req.body[x];
  }
  Pipeline // this is a mongoose object
    .findOneAndUpdate({ '_id.id': req.params.id }, u, { new: true })
    .exec(function(err, data) { respond(err, data, res); });
};

function respond(err, data, res) {
  if (err) {
    return res.send(err);
  }
  return res.json(data);
}
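As for the bind(res) follow-up: you can get a similar effect by partially applying the response with Function.prototype.bind, which keeps a single respond function without any globals. A minimal sketch of that variant (same update handler, just a different way of wiring the callback):

exports.update = function(req, res) {
  var u = { $set: {} };
  for (var x in req.body) {
    if (['name', 'stages'].indexOf(x) >= 0)
      u.$set[x] = req.body[x];
  }
  Pipeline // this is a mongoose object
    .findOneAndUpdate({ '_id.id': req.params.id }, u, { new: true })
    // bind res as the first argument; exec then supplies (err, data)
    .exec(respond.bind(null, res));
};

function respond(res, err, data) {
  if (err) {
    return res.send(err);
  }
  return res.json(data);
}

Whether you prefer the bind version or the inline wrapper is mostly a matter of taste; both keep res scoped to the request instead of in a shared global.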
Related
I have a problem with a routing function in Express.
I am reading from the Firebase Realtime Database; the JSON has a lot of nested data and I would like to iterate over it with a for loop.
router.get('/admin_tavoli', async (req, res) => {
  try {
    var lista_tavoli = await firebase().ref('tavoli').once('value');
    var lista_tavoli_val = lista_tavoli.val();
    for (var i in lista_tavoli_val) {
      console.log(lista_tavoli_val[i].comanda.tavolo);
    }
    res.send('ok');
  } catch (err) {
    res.json({ message: err });
  }
});
If I stay at the first level of the JSON, for example
for (var i in lista_tavoli_val) {
  console.log(lista_tavoli_val[i].comanda);
}
there are no problems.
But if I go deeper into the JSON
for (var i in lista_tavoli_val) {
  console.log(lista_tavoli_val[i].comanda.tavolo);
}
the program throws an error, yet the strange thing is that I see the correct data in the terminal.
Why does this happen?
Thanks to all for the help.
I need to fetch two different MongoDB collections (db.stats and db.tables) for the same request req.
Now, in the code below, I am nesting the queries within the callback function.
router.post('/', (req, res) => {
  let season = String(req.body.year);
  let resultData, resultTable;
  db.stats.findOne({ Year: season }, function (err, data) {
    if (data) {
      resultData = getResult(data);
      db.tables.findOne({ Year: season }, function (err, data) {
        if (data) {
          resultTable = getTable(data);
          res.render('index.html', {
            data: {
              result: resultData,
              message: "Working"
            }
          });
        } else {
          console.log("Error in Tables");
        }
      });
    } else {
      console.log("Error in Stats");
    }
  });
});
This code works, but there are a few things that don't seem right. So my question is:
How do I avoid this nested structure? It not only looks ugly, but while I am processing these requests the client side is unresponsive, and that is bad.
What you have right now is known as callback hell in JavaScript. This is where Promises come in handy.
Here's what you can do:
router.post('/', (req, res) => {
  let season = String(req.body.year);
  var queries = [
    db.stats.findOne({ Year: season }),
    db.tables.findOne({ Year: season })
  ];

  Promise.all(queries)
    .then(results => {
      if (!results[0]) {
        console.log("Error in Stats");
        return; // bad response. a better way is to return status 500 here
      } else if (!results[1]) {
        console.log("Error in Tables");
        return; // bad response. a better way is to return status 500 here
      }

      let resultData = getResult(results[0]);
      let resultTable = getTable(results[1]);

      res.render('index.html', {
        data: {
          result: resultData,
          message: "Working"
        }
      });
    })
    .catch(err => {
      console.log("Error in getting queries", err);
      // bad response. a better way is to return status 500 here
    });
});
It looks like you are using Mongoose as your ODM to access your mongo database. When you don't pass in a function as the second parameter, the value returned by the function call (e.g. db.stats.findOne({ Year: season })) will be a Promise. We will put all of these unresolved Promises in an array and call Promise.all to resolve them. By using Promise.all, you are waiting until all of your database queries get executed before moving on to render your index.html view. In this case, the results of your database function calls will be stored in the results array in the order of your queries array.
Also, I would recommend doing something like res.status(500).send("A descriptive error message here") whenever there is an error on the server side in addition to the console.log calls.
The above will solve your nested structure problem, but the latter problem will still be there (i.e. the client side is unresponsive while processing these requests). In order to solve this, you first need to identify your bottleneck. Which function calls are taking up most of the time? Since you are using findOne, I do not think that will be the bottleneck unless the connection between your server and the database has latency issues.
I am going to assume that the POST request is not done through AJAX, since you have res.render in it, so this problem shouldn't be caused by any client-side code. I suspect that one of getResult or getTable (or both) is taking up a significant amount of time, considering that it causes the client side to become unresponsive. What's the size of the data when you query your database? If it is so large that processing it takes a significant amount of time, I would recommend changing the way the request is made. You can use AJAX on the front-end to make a POST request to the back-end, which then returns the response as a JSON object. That way, the page in the browser does not need to reload, and you'll get a better user experience.
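A rough sketch of that approach (the /api/season route name and the response shape are made up for illustration):

// Server side: return JSON instead of rendering a view
router.post('/api/season', (req, res) => {
  let season = String(req.body.year);
  Promise.all([db.stats.findOne({ Year: season }), db.tables.findOne({ Year: season })])
    .then(([stats, tables]) => {
      if (!stats || !tables) return res.status(500).send("Season not found");
      res.json({ result: getResult(stats), table: getTable(tables), message: "Working" });
    })
    .catch(err => res.status(500).send("Query failed"));
});

// Client side: request the data without reloading the page
fetch('/api/season', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ year: 2018 })
})
  .then(response => response.json())
  .then(data => {
    // update the DOM here with data.result and data.table
  });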
The MongoDB driver returns a promise if you don't pass a callback, so you can use async/await:
router.post('/', async (req, res) => {
  let season = String(req.body.year);
  let resultData, resultTable;
  try {
    const [data1, data2] = await Promise.all([
      db.stats.findOne({ Year: season }),
      db.tables.findOne({ Year: season })
    ]);
    if (data1 && data2) {
      resultData = getResult(data1);
      resultTable = getTable(data2);
      return res.render('index.html', {
        data: {
          result: resultData,
          message: "Working"
        }
      });
    }
    res.send('error');
    console.log("Error");
  } catch (err) {
    res.send('error');
    console.log("Error");
  }
});
OK, let's say I have two models, Contract and CommLog. Both work fine independently, but I need many CommLogs to relate to each Contract.
In the ContractSchema I am trying async:
ContractSchema.methods.getCommLog = function getCommLog() {
  var log = false;
  async.parallel([
    function() {
      CommLog.find({ commType: 'contract', parent: this._id }, function(err, comms) {
        log = comms;
      });
    }
  ], function() { return log; });
};
Where I am trying to use it
router.get('/:code', function(req, res, next) {
  Contract.findOne({ accessCode: req.params.code }, function(err, contract) {
    if (err)
      res.send(err);
    var data;
    if (contract != null) {
      var comms = contract.getCommLog();
      data = { error: false, data: contract, commlog: comms };
    } else {
      data = { error: true, message: "No Contract" };
    }
    res.json(data);
  });
});
Where it says var comms = contract.getCommLog();, nothing is ever returned, because the async work inside getCommLog() hasn't finished by the time the value is used...
I think it's my misunderstanding of Mongoose querying, so if you understand what I am trying to accomplish, please let me know what I am doing wrong. I have also tried it without async, which always returned false.
The find call can return all matching results with one query, so I don't think you need async here. The reason it is not populating correctly when you call res.json(data) is because you are not waiting for the method call to finish before you fire off your server response. You would be better off nesting an additional CommLogs.find call within the Contract.find call, and only sending your response once that finishes.
// pseudo code:
Contract.find({}, function(err, contract) {
  if (err || !contract) {
    // return error response
  } else {
    CommLogs.find({ contract: contract._id }, function(err, commlogs) {
      if (err || !commlogs) {
        // return error response 2
      } else {
        res.json({ errors: false, contract: contract, commlogs: commlogs });
      }
    });
  }
});
I am trying to keep a session open with the Bloomberg Public API, relaying calls from my own service's API to it to fetch data. I am running the Node.js/Express server locally right now. I have an API route that works fine the first time: I send the GET and quickly get the response back. If I then send another GET to the same route, I can see the data that the Bloomberg API returns in my server console, but the server seems to get stuck at res.send(...) and I have no idea why. I've tried numerous things, like moving code blocks around and forcefully destroying variables, but to no avail. Do you see anything obvious that might work?
'use strict';

var _ = require('lodash');
var util = require('util'); // needed for util.inspect below
var Blpapi = require('./blpapi.model');
var count = 0;
var blpapi = require('blpapi');

// Add 'authenticationOptions' key to session options if necessary.
var session = new blpapi.Session({ serverHost: '10.8.8.1', serverPort: 8194 });
var service_refdata = 1; // Unique identifier for refdata service

session.start();

session.on('SessionStarted', function(m) {
  console.log(m);
  session.openService('//blp/refdata', service_refdata);
});

session.on('ServiceOpened', function(m) {
  console.log(m);
});

session.on('SessionStartupFailure', function(m) {
  console.log('SessionStartupFailure', util.inspect(m));
  session.stop();
  session.destroy();
});

session.on('SessionTerminated', function(m) {
  console.log('Session Terminated');
  session.stop();
  session.destroy();
});

exports.getStock = function (req, res) {
  var stock = req.url.substring(8, req.url.length);
  stock = stock.replace(/_/g, ' ');
  session.on('HistoricalDataResponse', function(m) {
    console.log(m);
    if (m.eventType === 'RESPONSE' && m.correlations[0].value === 101) {
      console.log('send');
      res.send(m.data.securityData);
    } else {
      res.send(500);
    }
  });
  newRequest(stock);
};

function newRequest(sec) {
  if (typeof sec !== 'string') return;
  session.request('//blp/refdata', 'HistoricalDataRequest',
    { securities: [sec],
      fields: ['PX_LAST', 'OPEN'],
      startDate: "20140101",
      endDate: "20140301",
      periodicitySelection: "DAILY" }, 101);
}

function handleError(res, err) {
  return res.send(500, err);
}
Edit 1: If I change res.send(m.data.securityData); to res.send(201);, the requests come back fine, so I figure it has something to do with that object.
I figured it out. It was because I was registering the session.on('HistoricalDataResponse', ...) listener inside my route controller. Moving it out and adding a bit of logic around it solved the problem.
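Roughly, the working version looks like this (a sketch rather than my exact code: the pendingResponses map, the per-request correlation IDs, and reading the ticker from req.params are illustrative):

// Register the listener once, outside the route controller,
// and route each response to the request that asked for it.
var nextCorrelationId = 101;
var pendingResponses = {};

session.on('HistoricalDataResponse', function(m) {
  var waiting = pendingResponses[m.correlations[0].value];
  if (!waiting) return; // no request waiting on this correlation ID
  if (m.eventType === 'RESPONSE') {
    delete pendingResponses[m.correlations[0].value];
    waiting.send(m.data.securityData);
  }
});

exports.getStock = function (req, res) {
  var stock = req.params.stock.replace(/_/g, ' ');
  var correlationId = nextCorrelationId++;
  pendingResponses[correlationId] = res; // remember which response to use
  session.request('//blp/refdata', 'HistoricalDataRequest',
    { securities: [stock],
      fields: ['PX_LAST', 'OPEN'],
      startDate: "20140101",
      endDate: "20140301",
      periodicitySelection: "DAILY" }, correlationId);
};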
I am trying to develop an API with Node.js which accepts an object containing multiple queries to MongoDB and answers with an object containing the different results (as JSON).
I use Express and my code is:
var nb_query = 0;
var results;

// api
app.get("/api/:p", api);

function api(req, res) {
  var jsonq = decodeURIComponent(req.params.p);
  //console.log(jsonq);
  var queries = JSON.parse(jsonq);
  nb_query = Object.keys(queries).length;
  results = {};
  for (var nq in queries) { // for each query
    do_find_query(nq, queries[nq], function() {
      // todo: managing head
      res.end(JSON.stringify(results));
    });
  }
} // end of api function

function do_find_query(name_query, query, callback) {
  var collection = fdb.collection(query.collection);
  collection.find(query.find, query.fields, query.options).toArray(function(err, docs) {
    if (err) throw err;
    results[name_query] = docs;
    nb_query--;
    if (nb_query == 0)
      callback();
  });
}
As you can see, I use global vars to store the results and the counter nb_query, and I wonder whether that is a problem or not (right now it isn't, because I am alone on the server, but what about when we are thousands or billions? :-)).
As I understand Node, there is only one thread, and Node will run a started job to completion unless it hits an I/O call. In that case it queues the I/O together with its callback and starts answering a new request.
If this is correct, Node could be answering two or more different calls to my API (each of which needs Mongo calls) at the same time, and so store different values in the global vars, which are shared (there's only one thread).
If this is right, I would also like to know the best way to change it.
My idea is to declare results and nb_query inside the api function and pass them to do_find_query, but nb_query isn't an object, so it isn't updated correctly.
I know I can put nb_query inside an object to pass it 'by reference', but first I want to know whether that is necessary, and whether it is a good way to do it or there is a better one.
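Something like this is what I have in mind (just a sketch), wrapping the per-request state in an object so the callback can decrement the counter:

function api(req, res) {
  var queries = JSON.parse(decodeURIComponent(req.params.p));
  // state scoped to this request instead of globals
  var state = { nb_query: Object.keys(queries).length, results: {} };
  for (var nq in queries) {
    do_find_query(nq, queries[nq], state, function() {
      res.end(JSON.stringify(state.results));
    });
  }
}

function do_find_query(name_query, query, state, callback) {
  var collection = fdb.collection(query.collection);
  collection.find(query.find, query.fields, query.options).toArray(function(err, docs) {
    if (err) throw err;
    state.results[name_query] = docs;
    if (--state.nb_query == 0)
      callback();
  });
}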
Thanks for your help !
Doom.
------------------------------------------------------------------------------
EDIT:
I have changed my code and it seems to work without global vars and without the async library (which, for this, feels like using a hammer to swat a fly):
// api
app.get("/api/:p", api);

function api(req, res) {
  var jsonq = decodeURIComponent(req.params.p);
  //console.log(jsonq);
  var queries = JSON.parse(jsonq);
  var query_names = Object.keys(queries);
  var results = {};
  var query_left = query_names.length;
  query_names.map(function(query_name) {
    var query = queries[query_name];
    var collection = fdb.collection(query.collection);
    collection.find(query.find, query.fields, query.options).toArray(function(err, docs) {
      if (err) throw err; // todo: handle errors in a better way
      results[query_name] = docs;
      if (--query_left == 0)
        res.json(results);
    });
  });
}
But I still do not know whether this is really necessary. (I think so, but I am new to Node, so...)
Thanks to mscdex, whose answer introduced me to res.json() and helped me understand variable scope.
Instead of using globals, try this (uses the async module):
var async = require('async');

// ...

app.get('/api/:p', api);

function api(req, res) {
  var jsonq = decodeURIComponent(req.params.p),
      queries = JSON.parse(jsonq),
      keys = Object.keys(queries),
      results = {};

  async.each(keys, function(name, cb) {
    var query = queries[name],
        collection = fdb.collection(query.collection);
    collection.find(query.find, query.fields, query.options)
      .toArray(function(err, docs) {
        if (err) return cb(err);
        results[name] = docs;
        cb();
      });
  }, function(err) {
    if (err) throw err; // TODO: handle better
    res.json(results);
  });
} // end of api function