MongoDB: Waiting for available socket hanging - node.js

So I am using Mongoose's findOneAndUpdate to update a key's value in a document. I need to increment that number by one, and if it's not in there it should create a new document. It's in a POST route in Node.
I have information going to the POST route on a button click. My app requires users to click a button multiple times in succession, and I'm getting a "Waiting for available sockets..." message at the bottom of my browser and everything hangs.
I am just doing this on my localhost. At the end of the request, once everything "unhangs", I get the following error in my console:
POST http://localhost:3000/api/songsqueued net::ERR_EMPTY_RESPONSE
This is the POST route I'm working with:
router.post('/api/songsqueued', function(req, res) {
  Song.findOneAndUpdate({ songId: req.body.songId }, { $inc: { queueTimes: 1 } },
    function(err, song) {
      if (err) {
        return console.log("Error: " + err);
      }
      if (song) {
        song.queueTimes += 1;
        song.save(function(err) {
          if (err) {
            console.log(err);
          } else {
            console.log("updated fam");
          }
        });
      } else {
        var song = new Song();
        song.artistName = req.body.artistName;
        song.titleName = req.body.titleName;
        song.songId = req.body.songId;
        song.songImg = req.body.songImg;
        song.save(function(err) {
          if (err) {
            console.log("Error: " + err);
          } else {
            console.log("created fam");
          }
        });
        console.log(song);
        return res.json({ message: "SongCreated" });
      }
    });
});
Any ideas on how to remedy this?

Your Node server needs to send a response to the client in order for the request to complete. If you don't send a response, the connection will eventually time out and you get the socket hang-up. Make sure that you send a response for each condition:
if (err) {
  return res.status(500).end("some error message");
} else if (song) {
  return res.status(200).end("updated");
} else {
  return res.status(201).end("created");
}

Related

How to return database data from NodeJS Express routes?

Working with NodeJS for the first time, trying to build a public endpoint that can accept an XML file, convert it to JSON, save it to MongoDB, then send a 200 HTTP status code if everything went well. But the Express route completes and sends a response long before the code writing to the database completes.
A slightly simplified version:
app.post('/ams', function (req, res) {
  try {
    if (Object.keys(req.body).length === 0) {
      console.log("request body was empty");
      // throw new EmptyBodyException(req.body);
      message = message + "Request body was empty \n";
    }
    let body_keys = Object.keys(req.body);
    let body_values = Object.values(req.body);
    let xml_string = body_keys[0] + ":" + body_values[0];
    let xml = xml_string.trim();
    console.log("The trimmed XML:");
    console.log(xml);
    // convert XML to JSON
    xml2js.parseString(xml, (err, json) => {
      if (err) {
        message = "xml2js.parseString failed, xml was: " + xml + "\n\n" + err;
        console.log(message);
        res.status(500);
        res.send(message);
      }
      const documentType = json.Document.DocumentType;
      if (documentType == 945) {
        const shipment = json.Document.Shipment[0];
        const shipment_header = shipment.ShipmentHeader[0];
        const addresses = shipment.Addresses[0].Address;
        const order_header = shipment.Orders[0].Order[0].OrderHeader[0];
        const cartons = shipment.Orders[0].Order[0].Cartons[0].Carton;
        const unique_id = shipment_header.ShipmentID[0];
        found_document_promise = document_is_redundant(AMS_945, unique_id);
        found_document_promise.then(function (found_document) {
          if (found_document != null) {
            console.log("Redundant document. Perhaps a Pair Eyewear engineer was running a test?");
            res.status(200);
            message = "Redundant document. Perhaps a Pair Eyewear engineer was running a test? documentType: " + documentType;
            res.send(message);
          } else {
            new AMS_945({
              shipment_header: shipment_header,
              addresses: addresses,
              order_header: order_header,
              cartons: cartons,
              unique_id: unique_id
            })
              .save()
              .then(function () {
                // console.log("saved AMS_945");
                message = "saved AMS_945";
                res.status(200);
                res.send(message);
              })
              .catch(function (err) {
                message = "error when saving AMS_945 to database: " + "\n\n" + err;
                console.log(message);
                res.status(500);
                res.send(message);
              });
          }
        })
        .catch(function (err) {
          message = "error when checking for a redundant AMS_945 document: " + "\n\n" + err;
          console.log(message);
          res.status(500);
          res.send(message);
        });
    // down at the bottom I have some generic catch-all:
    res.status(200);
    res.send("Catch all response.");
If I don't have the catch-all response at the end then the connection simply hangs until the 30-second timeout is hit, and then I get a 504 Gateway Timeout.
With the catch-all response at the bottom, thankfully I don't get the timeout error, but I do get an error about headers being sent after the response was already sent, because at some point the database code returns and tries to send its response, long after the Express route function has completed and sent that generic catch-all response at the bottom.
I'd be happy to get rid of the catch-all res.send() and just have the res.send() inside the database code, but that never seems to return.
So how do I get the Express route function to wait until the database code has returned?
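One possible direction (a rough sketch rather than a verified fix; it assumes a reasonably recent xml2js that provides parseStringPromise, and it reuses the document_is_redundant helper and AMS_945 model from the code above): drop the catch-all, make the handler async, and await each step so that exactly one response is sent per request:
app.post('/ams', async function (req, res) {
  try {
    const body_keys = Object.keys(req.body);
    const body_values = Object.values(req.body);
    const xml = (body_keys[0] + ":" + body_values[0]).trim();
    // parseStringPromise ships with newer xml2js releases; otherwise wrap parseString in a Promise
    const json = await xml2js.parseStringPromise(xml);
    const documentType = json.Document.DocumentType;
    if (documentType != 945) {
      return res.status(200).send("ignored documentType: " + documentType);
    }
    const shipment = json.Document.Shipment[0];
    const unique_id = shipment.ShipmentHeader[0].ShipmentID[0];
    const found_document = await document_is_redundant(AMS_945, unique_id);
    if (found_document != null) {
      return res.status(200).send("Redundant document. documentType: " + documentType);
    }
    await new AMS_945({
      shipment_header: shipment.ShipmentHeader[0],
      addresses: shipment.Addresses[0].Address,
      order_header: shipment.Orders[0].Order[0].OrderHeader[0],
      cartons: shipment.Orders[0].Order[0].Cartons[0].Carton,
      unique_id: unique_id
    }).save();
    // no catch-all res.send() is needed: every path above has already responded
    return res.status(200).send("saved AMS_945");
  } catch (err) {
    return res.status(500).send("error while processing document: \n\n" + err);
  }
});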

Node.js stops listening (gets stuck) after executing query more than 6 times

I am using Express. This is the query which I am executing multiple times:
router.post('/Index_api/Reminder/Delete', function(req, res) {
  Reminder_Id = req.body;
  DeleteRow('Reminders', 'Reminder_Id', Reminder_Id.Reminder_Id);
});
and this is the DeleteRow function:
function DeleteRow(TableName, WCC, id) {
  var query = "Delete FROM `" + TableName + "` WHERE `" + WCC + "` =" + id;
  conn.query(query, function(err, result) {
    if (err) {
      console.error(err);
      return;
    } else {
      console.log(result);
    }
  });
}
I am posting data to this route like this:
function DeleteRow(id) {
  $.post('/Index_api/Reminder/Delete', {
    Reminder_Id: id
  });
  $("#row" + id).remove();
}
If I want to delete 6 records together there is no problem, but the 7th one is not executed and everything gets stuck.
I am also using nodemon and the reload package.
Your router.post() route handler is not returning any response, so the browser is still waiting for one. The browser has a maximum number of connections it will make to any given host, so once too many connections are sitting there waiting for a response, the browser queues the next requests until one of the prior ones finishes.
Eventually those requests will time out, but that can take a long time.
To fix, just send a response from your route handler:
router.post('/Index_api/Reminder/Delete', function(req, res) {
  Reminder_Id = req.body;
  DeleteRow('Reminders', 'Reminder_Id', Reminder_Id.Reminder_Id);
  res.send("ok");
});
Or, if you want the POST to respond only after DeleteRow() is done, you can pass res to DeleteRow() and let it send the response:
router.post('/Index_api/Reminder/Delete', function(req, res) {
  Reminder_Id = req.body;
  DeleteRow('Reminders', 'Reminder_Id', Reminder_Id.Reminder_Id, res);
});
function DeleteRow(TableName, WCC, id, res) {
  var query = "Delete FROM `" + TableName + "` WHERE `" + WCC + "` =" + id;
  conn.query(query, function(err, result) {
    if (err) {
      console.error(err);
      res.status(500).send("delete failed");
    } else {
      res.send("delete OK");
    }
  });
}
Then, you should probably also change your client code to actually look at the returned status and act accordingly.
function DeleteRow(id) {
  $.post('/Index_api/Reminder/Delete', {
    Reminder_Id: id
  }).then(function(result) {
    if (result.status == 200) {
      // successful
      $("#row" + id).remove();
    } else {
      // handle server error here
    }
  }, function(err) {
    // network error here
  });
}
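One further note that the answer above does not cover: DeleteRow() builds its SQL by string concatenation, which is open to SQL injection. If conn comes from the node mysql driver (an assumption on my part), placeholders can do the escaping instead:
function DeleteRow(TableName, WCC, id, res) {
  // ?? is the identifier placeholder (table/column names), ? is the value placeholder
  conn.query('DELETE FROM ?? WHERE ?? = ?', [TableName, WCC, id], function(err, result) {
    if (err) {
      console.error(err);
      return res.status(500).send("delete failed");
    }
    res.send("delete OK");
  });
}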

LoopBack: customize the result sent to the client after delete

In my LoopBack application I have to customize the delete result sent to the client.
xxxxx.observe('before delete', function(ctx, next) {
  xxxxx.find({
    where: {
      id: ctx.where.id
    }
  }, function (err, transaction) {
    if (yyyy.length > 0) {
      var id = yyyy[0]['id'];
      pgclient.query("delete from qqq where kkkk = '" + id + "' ", function(err, dbRes) {
        if (err) {
          next(err);
        } else {
          next(); // ----------------> here I have to customize the result
        }
      });
    } else {
      next({ "message": " for the given id" });
    }
  });
});
Now the result sent to the client is:
{
"count": 1
}
But I need to modify this to:
{"message": "Deleted Successfully"}
I have tried a lot, but with no result. Please share your ideas.
You can't. This is an operation hook, and its next callback is only for signaling the success or failure of the operation.
To customize the message you should do it in a remote method or a remote hook.
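For example, a minimal sketch with an 'after remote' hook (LoopBack 3 style; the model name Xxxxx and the use of the built-in deleteById endpoint are assumptions, so adjust them to your app):
module.exports = function(Xxxxx) {
  Xxxxx.afterRemote('deleteById', function(ctx, output, next) {
    // replace the default {"count": n} payload with a custom message
    ctx.result = { message: "Deleted Successfully" };
    next();
  });
};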

Node.js and MongoDB on MongoLab: "Sockets Closed" On Every Insert

I'm trying to do something relatively simple and am running into a "server ...-a.mongolab.com:36648 sockets closed" error all of a sudden every time I try to do an "insert".
Reads seem to work without error, but inserts seem to get an error every time and I'm not sure if it's my code (which recently underwent minor changes), or a reliability problem with the free server I'm using at MongoLab (which recently showed itself to be down for a few minutes).
Oddly enough, the record itself seems to save okay, I just get the error back!
Can anyone see an issue with my code, or could this be something else?
var mongoClient = require('mongodb').MongoClient;
var http = require('http');
var connectionString = "...";
var pictureWallsCollectionName = 'PictureWalls';
// this is what barfs. see *** details
exports.saveWall = function (req, res) {
  // reformat
  var toSave = {
    _id: req.body.wallId,
    pictures: req.body.pictures
  };
  var status;
  mongoClient.connect(connectionString, function (err, db) {
    if (err) { return console.error(err); }
    var collection = db.collection(pictureWallsCollectionName);
    // *** no err yet... ***
    collection.insert(
      toSave,
      function (error, response) {
        // *********************
        // *** err here! ******
        // *********************
        db.close();
        if (error) {
          console.error(error);
          // bad
          status = 500;
        } else {
          console.log('Inserted into the ' + collection_name + ' collection');
          // good
          status = 200;
        }
      });
    response.status(status).end(http.STATUS_CODES[status]);
  });
}
// this seems to work pretty reliably. including it just in case it's relevant
exports.findByWallId = function (req, res) {
  var id = req.params.id;
  console.log('Retrieving wall: ' + id);
  mongoClient.connect(connectionString, function (err, db) {
    if (err) { return console.dir(err); }
    var collection = db.collection(pictureWallsCollectionName);
    collection.findOne(
      { _id: id },
      function (err, item) {
        db.close();
        if (err) {
          console.error(err);
          // something bad happened
          var status = 500;
          res.status(status).end(http.STATUS_CODES[status]);
        } else {
          console.log('Found wall with ID ' + id);
          // reformat and send back in the response
          res.send({
            wallId: item._id,
            pictures: item.pictures
          });
        }
      }
    );
  });
};
EDIT: Part of my original issue was duplicate parameter names. See the linked question for detail.
ORIGINAL RESPONSE:
The issue ended up being that I was calling:
res.status(status).end(http.STATUS_CODES[status]);
...before the async insert was finished, so it barfed.
However, I'm not exactly sure how to issue the response in this case. See my new question here:
How Do I Properly Issue Response To Post When Waiting For Async Method To Complete?
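For reference, the shape of that fix is to respond from inside the insert callback, using the route handler's res rather than the insert callback's own response argument (a sketch, not the final code):
collection.insert(toSave, function (error, result) {
  db.close();
  var status = error ? 500 : 200;
  if (error) { console.error(error); }
  // respond only after the insert has finished, using the route's res
  res.status(status).end(http.STATUS_CODES[status]);
});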

Trying to implement callback, or some way to wait until my get request is done. This is in node.js express

I'm trying to implement some way to stop my code from redirecting before I get the response from the OMDb API I am using.
My function for searching for a movie and saving all the titles in a session looks like this:
app.post('/search', isLoggedIn, function(req, res) {
  function getMovies(arg, callback) {
    console.log('In getMovies');
    console.log('searching for ' + arg);
    omdb.search(arg, function(err, movies) {
      if (err) {
        return console.error(err);
      }
      if (movies.length < 1) {
        return console.log('No movies were found!');
      }
      var titles = [];
      movies.forEach(function(movie) {
        // If title exists in array, dont push.
        if (titles.indexOf(movie.title) > -1) {
          console.log('skipped duplicate title of ' + movie.title);
        } else {
          titles.push(movie.title);
          console.log('pushed ' + movie.title);
        }
      });
      // Saves the titles in a session
      req.session.titles = titles;
      console.log(req.session.titles);
    });
    // Done with the API request
    callback();
  }
  var title = req.body.title;
  getMovies(title, function() {
    console.log('Done with API request, redirecting to GET SEARCH');
    res.redirect('/search');
  });
});
However, I don't know if I have implemented the callback the right way, because I think the API request starts before the callback runs but doesn't finish before it, which is why the callback only appears to work.
So I just want two things from this question: does my callback work, and what can I do if a callback won't solve this problem?
Thankful for any answers pointing in the right direction.
Move the callback(); call inside the omdb.search callback, like this:
omdb.search(arg, function(err, movies) {
  if (err) {
    return console.error(err);
  }
  if (movies.length < 1) {
    return console.log('No movies were found!');
  }
  var titles = [];
  movies.forEach(function(movie) {
    // If title exists in array, dont push.
    if (titles.indexOf(movie.title) > -1) {
      console.log('skipped duplicate title of ' + movie.title);
    } else {
      titles.push(movie.title);
      console.log('pushed ' + movie.title);
    }
  });
  // Saves the titles in a session
  req.session.titles = titles;
  callback();
});
omdb.search is an asynchronous function; that's why your callback executed before omdb.search had finished.
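One caveat worth adding (my own note, not part of the original answer): in the error branches above, callback() is never invoked, so the redirect would never fire when the search fails or returns no movies. A node-style callback(err) keeps those paths covered:
omdb.search(arg, function(err, movies) {
  if (err) {
    console.error(err);
    return callback(err); // let the route decide how to respond
  }
  if (movies.length < 1) {
    console.log('No movies were found!');
    return callback(null); // still finish, just with no titles saved
  }
  // ... build titles and store them in req.session.titles as above ...
  callback(null);
});
// and in the route:
getMovies(title, function(err) {
  if (err) {
    return res.status(500).send('OMDb search failed');
  }
  res.redirect('/search');
});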
