Can't set headers after they are sent to the client [ERR_HTTP_HEADERS_SENT] - node.js

I understand why this is an error, and I tried the usual suggestions such as adding a return or making the function async, but I could not solve it.
I have find.length documents, each with 100 sub-documents, which is why I use two loops. When the first iteration is complete and a second matching value is found, the server crashes. Without the server part it works.
I have documents named 0, 1, 2, and each of them holds 100 crypto coin records (indices 0 to 99). I added an image. For example, USDT is at [0,3], [1,43], [2,13] and I want to send all three.
app.post('/', (req, res) => {
    print(req, res)
})

function print(req, res) {
    MongoClient.connect(url, function (err, db) {
        if (err) throw err;
        var dbo = db.db("cryptoDb");
        dbo.collection("coinTable").find({}).toArray(function (err, find) {
            if (err) throw err;
            for (i = 0; i < find.length; i++) {
                let found = false;
                for (j = 0; j < 100; j++) {
                    //console.log(i + " " + j)
                    let id = Capitalize(req.body.coinName);
                    if (find[i].result[j].name == id || find[i].result[j].symbol == id.toUpperCase()) {
                        // console.log(find[i].result[j]);
                        res.send(find[i].result[j]);
                        found = true;
                    }
                }
                if (!found) {
                    console.log("Not found")
                }
            }
            db.close();
        });
    });

    function Capitalize(s) {
        return s[0].toUpperCase() + s.slice(1).toLowerCase();
    }
}
Thank you so much!

This error comes from attempting to send more than one response to the same http request.
You have res.send(find[i].result[j]); inside a for loop and you do not stop the inner loop after sending a response.
Thus, this code is capable of attempting to send multiple responses to the same request which you cannot do.
It's unclear from the code exactly what the desired solution is. If you only want to send the first response, then you can close the db and return after you send a response which will terminate both for loops.
If you intend to send multiple pieces of data, then accumulate the data you want to send in an array and send all the data once after all the loops are done.
If you're trying to send an array of all matching results, you can do this:
app.post('/', (req, res) => {
    print(req, res)
})

function print(req, res) {
    MongoClient.connect(url, function(err, db) {
        if (err) {
            console.log(err);
            res.sendStatus(500);
            return;
        }
        var dbo = db.db("cryptoDb");
        dbo.collection("coinTable").find({}).toArray(function(err, find) {
            if (err) {
                console.log(err);
                res.sendStatus(500);
                db.close();
                return;
            }
            const id = Capitalize(req.body.coinName);
            const idUpper = id.toUpperCase();
            const results = [];
            for (let findItem of find) {
                for (let j = 0; j < 100; j++) {
                    if (findItem.result[j].name == id || findItem.result[j].symbol == idUpper) {
                        results.push(findItem.result[j]);
                    }
                }
            }
            res.send(results);
            if (!results.length) {
                console.log("No results");
            }
            db.close();
        });
    });

    function Capitalize(s) {
        return s[0].toUpperCase() + s.slice(1).toLowerCase();
    }
}
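For completeness, the other option mentioned above, sending only the first match, could look roughly like this. This is just a sketch, not part of the original answer; it reuses the same url, MongoClient and Capitalize from the code above and simply returns after the first res.send so neither loop can send a second response:

app.post('/', (req, res) => {
    MongoClient.connect(url, function (err, db) {
        if (err) return res.sendStatus(500);
        const dbo = db.db("cryptoDb");
        dbo.collection("coinTable").find({}).toArray(function (err, find) {
            if (err) {
                db.close();
                return res.sendStatus(500);
            }
            const id = Capitalize(req.body.coinName);
            const idUpper = id.toUpperCase();
            for (const findItem of find) {
                for (const coin of findItem.result) {
                    if (coin.name == id || coin.symbol == idUpper) {
                        res.send(coin);      // first match wins
                        db.close();
                        return;              // returning here ends both loops and the callback
                    }
                }
            }
            res.sendStatus(404);             // nothing matched in any document
            db.close();
        });
    });
});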
Other Notes:
I changed the if (err) { ... } handling on your database call to actually send an error response. All paths through your request handler need to send a response or cause a response to be sent.
The hard-coded loop from 0 to 99 is a bit odd, as you don't check whether the .result array actually has that many entries. That could cause a run-time error if the data isn't shaped exactly the way you expect.
You don't have any validation of the req.body data you are expecting. All data arriving from the user should be validated before assuming it is what you are expecting.
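As a purely illustrative sketch of that last point (not from the original answer), a minimal guard on the one body field this handler uses might look like:

app.post('/', (req, res) => {
    // hypothetical minimal validation of the expected body field
    if (typeof req.body.coinName !== 'string' || req.body.coinName.trim() === '') {
        return res.status(400).send({ error: 'coinName is required' });
    }
    print(req, res);
});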

You have res.send inside of your for loop, so it can fire more than once. I'm assuming you want to send everything that matches, so collect the matches into an array and send them once after the loop:
var senddata = [];
let id = Capitalize(req.body.coinName);       // look the name up once, outside the loop
for (let j = 0; j < 100; j++) {               // <-- the loop that used to send multiple times
    // console.log(i + " " + j)
    if (find[i].result[j].name == id || find[i].result[j].symbol == id.toUpperCase()) {
        // console.log(find[i].result[j]);
        senddata.push(find[i].result[j]);     // add to the array instead of sending
        found = true;
    }
}
if (!found) {
    console.log("Not found")
}
res.send(JSON.stringify({ senddata }));       // send everything in one response, after the loop

Related

How to return an object from within a for loop

I want to return "items", which sits inside the for loop and two nested callback functions. "Items" is an object (I would not call it a variable) consisting of three array elements, and there can be more depending on the situation. I need to return "items" so I can access it outside and send it to the client using res.send(). If I send the data inside the loop and callback, it fails with "Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client". I found fixes for it, but after implementing them nothing happens; it throws the same error. I was thinking of using a callback function but I am confused about how to use one in this case. Thanks in advance.
router.get("/send", async (req, res) => {
    try {
        res.send("hello")
        //await sendData()
        getCollectionNames()
    } catch (error) {
        console.log(error);
    }

    function getCollectionNames() {
        MongoClient.connect(url, function (err, db) {
            var db = db.db('admin')
            mongoose.connection.db.listCollections().toArray(function (err, names) {
                for (let index = 0; index < names.length; index++) {
                    if (err) {
                        console.log(err);
                    }
                    let name = names[index].name
                    const collection = db.collection(name)
                    collection.find().toArray(function (err, items) {
                        console.log(items)
                    })
                }
            });
        })
    }
})
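No answer is reproduced here for this related question, but the same fix as above applies: gather the results of all the asynchronous calls and send a single response at the end. A rough sketch, purely illustrative; it assumes mongoose.connection is already open and a driver version where toArray() returns a promise when called without a callback:

router.get("/send", async (req, res) => {
    try {
        const db = mongoose.connection.db;                    // reuse the existing connection
        const names = await db.listCollections().toArray();   // all collection names
        const allItems = {};
        for (const { name } of names) {
            // awaiting inside the loop keeps the queries sequential;
            // collect the documents instead of sending per collection
            allItems[name] = await db.collection(name).find().toArray();
        }
        res.send(allItems);                                   // one response, after everything is gathered
    } catch (error) {
        console.log(error);
        res.sendStatus(500);
    }
});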

callback is not a function node js

I am new to JavaScript and I am having trouble solving this error. I get the message "callback is not a function" at return callback(rolesArray).
Rol.getAllRoles = function(callback){
    sql = "select role from Role;";
    var rolesArray = [];
    var role;
    mysql.connection(function(err, conn){
        if (err){
            return callback(err);
        }
        conn.query(sql, function(err, rows){
            if (err){
                return callback(err);
            }
            for(var i = 0; i < rows.length; i++){
                role = rows[i].role;
                rolesArray.push(role);
            }
            console.log("roles: " + rolesArray);
            return callback(rolesArray);
        });
    });
}
The console.log outputs:"roles: admin,customer" so the connection with the database works.
That error means that you are not passing a function to Rol.getAllRoles(fn) when you call it.
In addition, so that you can have proper error handling in your callback and so you can more easily distinguish between an error and the actual data, you should always pass a first argument to the callback that indicates whether there was an error or not and then the second argument (if not an error) can be your results array like this:
Rol.getAllRoles = function(callback){
    sql = "select role from Role;";
    var rolesArray = [];
    var role;
    mysql.connection(function(err, conn){
        if (err){
            return callback(err);
        }
        conn.query(sql, function(err, rows){
            if (err){
                return callback(err);
            }
            for(var i = 0; i < rows.length; i++){
                role = rows[i].role;
                rolesArray.push(role);
            }
            console.log("roles: " + rolesArray);
            // make sure the first argument to the callback
            // is an error value, null if no error
            return callback(null, rolesArray);
        });
    });
}
And, then you should be calling it like this:
Rol.getAllRoles(function(err, rolesArray) {
    if (err) {
        // handle error here
    } else {
        // process rolesArray here
    }
});
This style of calling an async callback as in callback(err, data) is a very common async callback design pattern. It allows all callers to see if there was an error or not and if there was no error to get access to the final result.
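As an aside (not part of the original answer), the error-first callback(err, data) convention is also exactly what Node's util.promisify expects, so once getAllRoles follows it you can optionally consume it with promises. A small illustrative sketch:

const { promisify } = require('util');

// hypothetical promise-returning wrapper around the error-first
// version of Rol.getAllRoles shown above
const getAllRolesAsync = promisify(Rol.getAllRoles);

getAllRolesAsync()
    .then(rolesArray => console.log('roles:', rolesArray))
    .catch(err => console.error(err));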
I'd suggest the following:
Rol.getAllRoles = function(callback){
    var sql = "select role from Role;";
    var rolesArray = [];
    var role;
    callback = callback || function(){};
    mysql.connection(function(err, conn){
        if (err){
            return callback(err);
        }
        conn.query(sql, function(err, rows){
            if (err){
                return callback(err);
            }
            for(var i = 0; i < rows.length; i++){
                role = rows[i].role;
                rolesArray.push(role);
            }
            console.log("roles: " + rolesArray);
            return callback(rolesArray);
        });
    });
}
This way you enforce that callback is always a function. Previously, calling it as Rol.getAllRoles() with no argument would throw an error; now it won't. You won't get any data back, though.
Make sure you are calling Rol.getAllRoles with the proper parameter (i.e. a function).

synchronous loop in nodejs

//Importing: postgres DB connection
var pg = require('pg');
var conString = "postgres://readxxx:p#ssword#vmwoxxx-tst:8888/worxxx";
var prvsiteid = '';
var cursiteid = '';
var qurystring = '';

pg.connect(conString, function(err, client, done) {
    if (err) {
        return console.error('error fetching client from pool', err);
    }
    client.query("select site_id,created_at,started_at,completed_at,notes,finish_code from _background_tasks where finish_code > 0 and site_id > 0 and abs(extract(Epoch from (now()::timestamp without time zone - completed_at)))/60 <= 4600 order by site_id asc", function(err, result1) {
        done();
        if (err) {
            return console.error('error running query', err);
        }
        for (var i = 0; i < result1.rowCount; i++) {
            cursiteid = result1.rows[i].site_id;
            if (prvsiteid != cursiteid) {
                prvsiteid = cursiteid;
                qurystring = "select trim(su.name) as name, case When trim(su.email) is null then su.name || '#netapp.com' when trim(su.email) > '' then su.name || '#netapp.com' else su.name end uemail,su.friendly_name as frdname from system_users su where su.state = 'active' and su.id in (select distinct(system_user_id) from users u where u.site_id = " + cursiteid + "and u.admin_level >= 5)";
                client.query(qurystring, function(err, result2) {
                    done();
                    if (err) {
                        return console.error('error running query', err);
                    }
                    for (var j = 0; j < result2.rowCount; j++) {
                        console.log(cursiteid, result2.rows[j].name, result2.rows[j].uemail, result2.rows[j].frdname);
                    }
                });
            }
            console.log(result1.rows[i].site_id, result1.rows[i].created_at, result1.rows[i].started_at, result1.rows[i].completed_at);
        }
    });
});
I know Node.js programs are asynchronous, but in this scenario I intend it to be synchronous.
for loop (outer) --> for loop (inner): when the outer for loop moves to a new site id, I want to send email to all the email ids from the inner loop, and the resulting rows for each site of the outer loop also have to be printed.
If the library you are using doesn't support synchronous operations, you can't use it synchronously without abominations (you do not want abominations in your code, no matter how much the boss is pressing).
You can fairly simply perform loops and other actions on asynchronous operations by using Promises; bluebird, in particular, can take callback-based asynchronous functions and turn them into promise-returning ones.
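A rough sketch of the idea, illustrative only: it wraps client.query by hand instead of using bluebird's helpers, reuses the client and done from pg.connect as in the question, and the names outerSql and innerSqlFor are hypothetical stand-ins for the two query strings above:

// wrap a single query on an existing pg client in a Promise
function queryAsync(client, sql) {
    return new Promise(function (resolve, reject) {
        client.query(sql, function (err, result) {
            if (err) reject(err);
            else resolve(result);
        });
    });
}

pg.connect(conString, function (err, client, done) {
    if (err) return console.error('error fetching client from pool', err);

    queryAsync(client, outerSql)                      // outerSql: the site_id query above
        .then(function (result1) {
            // chain the per-site queries so each one runs only after the previous has finished
            return result1.rows.reduce(function (chain, row) {
                return chain.then(function () {
                    return queryAsync(client, innerSqlFor(row.site_id))   // innerSqlFor builds the users query for one site
                        .then(function (result2) {
                            result2.rows.forEach(function (r) {
                                console.log(row.site_id, r.name, r.uemail, r.frdname);
                            });
                        });
                });
            }, Promise.resolve());
        })
        .then(function () { done(); })
        .catch(function (err) {
            done();
            console.error('error running query', err);
        });
});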

Synchronous for loop in node js

So let's say I have the following for loop
for (var i = 0; i < array.length; i++) {
    Model.findOne({ _id: array[i].id }, function(err, found) {
        //Some stuff
    });
}
How do I make this code work? Every time I run it I get array[i] = undefined, because the MongoDB query is asynchronous and the loop has already iterated 5 times by the time the first query is even completed. How do I go about tackling this issue and waiting for the query to complete before going on to the next iteration?
This doesn't specifically answer your question, but addresses your problem.
I'd use an $in query and do the filtering all at once. 20 calls to the db is pretty slow compared to 1:
// grab your ids
var arrayIds = myArray.map(function(item) {
    return item._id;
});

// find all of them
Model.find({_id: {$in: arrayIds}}, function(error, foundItems) {
    if (error) {
        // error handle
    }
    // set up a map of the found ids
    var foundItemsMap = {};
    foundItems.forEach(function(item) {
        foundItemsMap[item._id] = true;
    });
    // pull out your items that haven't been created yet
    var newItems = [];
    for (var i = 0; i < myArray.length; i++) {
        var arrayItem = myArray[i];
        if ( foundItemsMap[arrayItem._id] ) {
            // this array item exists in the map of foundIds
            // so the item already exists in the database
        }
        else {
            // it doesn't exist, push it into the new array
            newItems.push(arrayItem);
        }
    }
    // now you have `newItems`, an array of objects that aren't in the database
});
One of the easiest ways to accomplish what you want is to use promises. You could use the library q to do this:
var Q = require('q');

function fetchOne(id) {
    var deferred = Q.defer();
    Model.findOne({ _id: id }, function(err, found){
        if (err) deferred.reject(err);
        else deferred.resolve(found);
    });
    return deferred.promise;
}

function fetch(ids, action) {
    if (ids.length === 0) return;
    var id = ids.pop();
    fetchOne(id).then(function(model) {
        action(model);
        fetch(ids, action);
    });
}

fetch([1, 2, 3, 4, 5], function(model) { /* do something */ });
It is not the most beautiful implementation, but I'm sure you get the picture :)
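These days the same sequential behaviour falls out naturally from async/await, assuming a promise-returning driver call such as Mongoose's .exec(). A brief sketch, not part of the original answers:

async function fetchSequentially(array) {
    const results = [];
    for (const item of array) {
        // awaiting inside the loop guarantees each query finishes
        // before the next iteration starts
        const found = await Model.findOne({ _id: item.id }).exec();
        results.push(found);
    }
    return results;
}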
Not sure if this is the right way, and it could be a bit expensive, but this is how I did it.
I think the trick is to pull all your data first and then look for an id match.
Model.find(function(err, data) {
    if (err) {
        // handle it
    }
    for (var i = 0; i < array.length; i++) {
        for (var j = 0; j < data.length; j++) {
            if (data[j].id == array[i].id) {
                // do something
            }
        }
    }
});

JS Closures, Redis, loop, Async :: empty array

I give up on this. May some of the wise stackoverflow monks please fix my bugs?
The code is self-explanatory. The client sends room names, the server does a Redis lookup and pushes valid rooms to the array. After adding all the rooms, the list should be emitted to the client.
The problem is closure/async based. I understand the problem but cannot get a workaround, because the array needs to remain inside the function. Tricky.
Code:
function roomList(socket){
    var roomlist = [], rooms = getRooms(), p = /pChannel_/;
    redis.select(7, function(err, res){
        for (var k in rooms){
            if (rooms[k] != '' && p.test(rooms[k])){
                var key = 'channel:' + rooms[k];
                redis.hgetall(key, function (err, reply) {
                    if (reply){
                        var c = io.sockets.manager.rooms[rooms[k]];
                        roomlist.push( Array(reply['name'], c.length, reply['icon']) );
                    }
                    else { console.log('nothing found'); }
                });
            }
        }
        // here be dragons
        console.log(roomlist);
        socket.emit('roomList', roomlist);
    });
}
Thanks.
C'mon guys. The OP explicitly said she/he is interested in understanding how things are supposed to work. And you don't need Q or async or any other 3rd-party modules to implement this.
In the initial code, there are two problems:
In JavaScript, closure scope is at function level, not block level. A function must be introduced to define a proper closure; here, a simple forEach can be used.
The final step (i.e. the emit) does not currently run after the replies have been received from Redis. It must be called from inside the loop's callbacks, and to achieve that you need to count the items so that the inner callback can test whether the process is complete or not.
So here is another version:
function roomList(socket){
    var roomlist = [], rooms = getRooms(), p = /pChannel_/;
    redis.select(7, function(err, res){
        var count = rooms.length;
        rooms.forEach( function(r) {
            if ( r != '' && p.test(r) ) {
                var key = 'channel:' + r;
                redis.hgetall(key, function (err, reply) {
                    if (reply) {
                        var c = io.sockets.manager.rooms[r];
                        roomlist.push( Array(reply['name'], c.length, reply['icon']) );
                    } else {
                        console.log('nothing found');
                    }
                    if ( --count <= 0 ) {
                        // here be dragons
                        console.log(roomlist);
                        socket.emit('roomList', roomlist);
                    }
                });
            } else --count;
        });
    });
}
Looks like a job for async.map:
function roomList(socket){
    var rooms = getRooms(), p = /pChannel_/;
    redis.select(7, function(err, res) {
        async.map(rooms, function(room, callback) {
            if (room === '' || ! p.test(room))
                return callback(null, null);
            var key = 'channel:' + room;
            var c = io.sockets.manager.rooms[room];
            redis.hgetall(key, function (err, reply) {
                if (err)
                    callback(err); // propagate Redis errors to final callback, don't know
                                   // if you want that or not; use 'callback(null)' if not.
                else if (reply)
                    callback(err, Array(reply.name, c.length, reply.icon) );
                else
                    callback(err, null);
            });
        }, function(err, roomlist) {
            if (err) {
                // handle Redis errors...
            }
            // filter 'null' entries from roomlist
            roomlist = roomlist.filter(function(room) { return room !== null });
            console.log(roomlist);
            socket.emit('roomList', roomlist);
        });
    });
}
(untested)
If you just want to wait for the room list to be fully built before emitting the response (as seems highly reasonable), and assuming Q to be available, then you should just need a few additional lines of Q magic plus a closure-forming wrapper around the inner code to maintain a reliable reference to a Q deferred at each pass of the for loop.
function roomList(socket) {
    redis.select(7, function(err, res) {
        var list = [],
            rooms = getRooms(),
            p = /pChannel_/,
            promises = [];
        for (var k in rooms) {
            if (rooms[k] != '' && p.test(rooms[k])) {
                // the IIFE captures both the deferred and the current room, so the
                // async callback below doesn't read a stale rooms[k] after the loop ends
                (function(dfrd, room) {
                    promises.push(dfrd.promise);
                    var key = 'channel:' + room;
                    redis.hgetall(key, function(err, reply) {
                        if (reply) {
                            var c = io.sockets.manager.rooms[room];
                            list.push( [reply['name'], c.length, reply['icon']] );
                        }
                        else {
                            console.log('nothing found');
                        }
                        dfrd.resolve();
                    });
                })(Q.defer(), rooms[k]);
            }
        }
        Q.all(promises).then(function() {
            console.log(list);
            socket.emit('roomList', list);
        });
    });
}
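For reference (not one of the original answers), the same shape works with native Promises and Promise.all instead of Q; a minimal sketch, assuming rooms is an array as in the other answers:

function roomList(socket) {
    redis.select(7, function (err, res) {
        var rooms = getRooms(), p = /pChannel_/;
        var promises = rooms
            .filter(function (room) { return room !== '' && p.test(room); })
            .map(function (room) {
                return new Promise(function (resolve) {
                    redis.hgetall('channel:' + room, function (err, reply) {
                        if (!reply) return resolve(null);            // skip rooms with no hash
                        var c = io.sockets.manager.rooms[room];
                        resolve([reply.name, c.length, reply.icon]);
                    });
                });
            });
        Promise.all(promises).then(function (list) {
            socket.emit('roomList', list.filter(Boolean));           // drop the skipped rooms
        });
    });
}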
