I'm updating an object in an array with Mongoose. After it updates I'm firing res.send() in the callback.
If I don't fire the res.send() the update saves fine. But when I do res.send(), the entire object in the array is erased from Mongo.
// Question code: update one element of an embedded array on a Mongoose doc,
// then (intended) res.send() from the save callback.
landmarkSchema.findById(tileRes.worldID, function(err, lm) {
    if (!lm){
        // NOTE(review): logs err, but a null doc with no err also lands here.
        console.log(err);
    }
    else if (req.user._id == lm.permissions.ownerID){ // loose == — presumably ObjectId vs string; verify
        var min = tileRes.zooms[0];
        var max = tileRes.zooms.slice(-1)[0]; // last zoom level (min/max unused below)
        if (lm.style.maps.localMapArray){
            for (var i = 0; i < lm.style.maps.localMapArray.length; i++) { //better way to do this with mongo $set
                if (lm.style.maps.localMapArray[i].map_marker_viewID == req.body.map_marker_viewID) {
                    lm.style.maps.localMapArray[i]['temp_upload_path'] = '';
                    lm.style.maps.localMapArray[i]['localMapID'] = tileRes.mapURL;
                    lm.style.maps.localMapArray[i]['localMapName'] = tileRes.worldID;
                    // Nested/Mixed paths must be flagged dirty or save() skips them.
                    lm.markModified('style.maps.localMapArray');
                    // NOTE(review): save() runs once per matching element; several
                    // concurrent saves of the same doc can conflict with each other.
                    lm.save(function(err, landmark) {
                        if (err){
                            console.log('error');
                        }
                        else {
                            console.log('updated map',landmark);
                            //res.status(200).send(landmark);
                        }
                    });
                }
            }
        }
    }
});
Is it a write issue where Mongo doesn't finish writing before res.send is fired?
I recommend starting to use the async library to iterate in these cases; of course you can do this without it, but you'd need a very good reason to avoid using it.
I don't see the rest of your method, but using async it should be similar to this:
// Iterate the array with async.each; the final handler only runs after every
// per-item callback has fired, which is the safe place to res.send().
async.each(lm.style.maps.localMapArray, function(localMap, done) {
    // Do whatever you like here
    //....
    // Call method that requires a callback and pass the loop callback
    localMap.iUseACallbackAfterDoingMyStuff(done);
}, function(err) {
    // now here the loop has ended
    if (!err) {
        res.status(200).send(somethingToTheClient);
    } else {
        // Something happened..
    }
});
The code snippet is just for you to get the idea.
Related
I want to upsert document in the following way
// Question code: fires one upsert per event from a synchronous for loop.
// All updates run concurrently; nothing waits for any of them to finish.
for (var i = 0; i < req.body.app_events.length; i++ ) {
    console.log(req.body.app_events[i].event_key);
    //delete upsertData._id;
    Appusers.update({app_key: req.body.app_key, e_key:req.body.app_events[i].event_key}, {
        $set : {
            app_key:req.body.app_key,
            e_key: req.body.app_events[i].event_key,
            e_name: req.body.app_events[i].event_name
        }}, { upsert: true}, function(err, data){
        // NOTE(review): .update() yields the number affected, not the document.
        if(err) return console.log(err);
        console.log(data);
    });
}
It is creating a single document with _id only. I want to insert the document if it does not exist; otherwise, update it on the basis of e_key and app_key.
You really should not be calling asynchronous functions inside a synchronous loop. What you need is something that respects the callback on completion of the loop cycle and will alert you when the update is complete. This makes incrementing counters externally safe.
Use something like async.whilst for this:
// Run one findOneAndUpdate per event, strictly in sequence, via async.whilst.
// The shared counter i only advances inside the update callback, so it is safe.
var i = 0;
async.whilst(
    function() { return i < req.body.app_events.length; },
    function(callback) {
        console.log(req.body.app_events[i].event_key);
        //delete upsertData._id;
        Appusers.findOneAndUpdate(
            { app_key: req.body.app_key, e_key:req.body.app_events[i].event_key},
            {
                $set : {
                    app_key:req.body.app_key,
                    e_key: req.body.app_events[i].event_key,
                    e_name: req.body.app_events[i].event_name
                }
            },
            { upsert: true},
            function(err,data) {
                // Stop immediately on error; the original fell through and
                // invoked callback() a second time after callback(err).
                if (err) return callback(err);
                console.log(data);
                i++;
                callback();
            }
        );
    },
    function(err) {
        if (err) {
            console.log(err);
        }
        // else: all updates are finished — respond to the client here.
    }
);
Now the loop is wrapped with a "callback" which is called in itself within the callback to the update method. Also if you expect a "document" back then you should be using .findOneAndUpdate() as .update() just modifies the content and returns the number affected.
When the loop is complete or when an error is passed to the callback, then handling is moved to the last function block, where you complete your call or call other callbacks as required.
Better than above. Dig into the native driver methods for Bulk operations. You need to be careful that you have an open connection to the database already established. If unsure about this, then try to always wrap application logic in:
// Wait for the default mongoose connection before running app logic.
// mongoose.connection is an EventEmitter, not a function: subscribe with
// .once('open', ...) — the original `mongoose.connection('once', ...)` throws.
mongoose.connection.once('open', function() {
    // app logic here
});
Which makes sure the connections have been made. The mongoose methods themselves "hide" this away, but the native driver methods have no knowledge.
But this is the fastest possible listing to update the data:
// Fastest path: queue operations on a Bulk object and execute in batches of
// 1000. Requires an open connection, since .collection is the native driver.
var i = 0;
var bulk = Appusers.collection.initializeOrderedBulkOp();
async.whilst(
    function() { return i < req.body.app_events.length; },
    function(callback) {
        console.log(req.body.app_events[i].event_key);
        bulk.find(
            { app_key: req.body.app_key, e_key:req.body.app_events[i].event_key}
        ).upsert().updateOne({
            $set : {
                app_key:req.body.app_key,
                e_key: req.body.app_events[i].event_key,
                e_name: req.body.app_events[i].event_name
            }
        });
        i++;
        if ( i % 1000 == 0) {
            // Flush the current batch, then start a fresh Bulk object.
            bulk.execute(function(err,response) {
                // Bail out on error; the original called callback twice here.
                if (err) return callback(err);
                console.log(response);
                bulk = Appusers.collection.initializeOrderedBulkOp();
                callback();
            });
        } else {
            callback();
        }
    },
    function(err) {
        if (err) {
            console.log(err);
        } else if ( i % 1000 != 0 ) {
            // Flush the final partial batch (the original's bare `else` with
            // only a comment was a syntax error).
            bulk.execute(function(err,response) {
                if (err) return console.log(err);
                console.log(response);
                // done
            });
        }
        // else: ended exactly on a batch boundary — everything already flushed.
    }
);
The Bulk methods build up "batches" of results ( in this case 1000 at a time ) and send all to the server in one request with one response ( per batch ). This is a lot more efficient than contacting the database once per every write.
I have a problem using Node.js when making synchronous-style calls. Here's my problem:
I have the following code:
// Question code: async.doWhilst's "test" function is synchronous, but the
// continue/stop decision needs data from an asynchronous database call —
// so the loop decides before the query has returned.
async.doWhilst(function(callback) {
    //some code
    callback();
}, function() {
    //make a database call and based on the results I should
    //return true to continue looping or false to stop here
}, function(err) {
    //do some things when the loop finishes
})
The problem is when calling the database it is asynchronous call and the loop continues before even returning the proper value.
Thank you a lot for your comments. I have solved the problem by moving the database call into the loop body like this:
// Fix: run the query inside the iterated function and stash its rows, so the
// synchronous test function decides from data that is already fetched.
var results = []
async.doWhilst(function(next) {
    //some code
    sequelize.query('some query').success(function(rows) {
        results = rows;
        next();
    });
}, function() {
    //use the results variable that is fetched from the database
    //return true to continue looping or false to stop here
}, function(err) {
    //do some things when the loop finishes
})
You can have Sequelize return once the data has actually returned using this method:
return sequelize
.query("Database query here")
.success(function(result) {
//Do something with result here
else {
//Return error
}
});
Hope the following can help:
(function iterate() {
executeDatabaseCall(..., function(e,r) {
if (...conditions to continue looping are verified...)
iterate();
else
...manage the end of loop...
});
})();
If you really want to use sync call, you can use this library :
https://github.com/luciotato/waitfor
It'll permit you to make sync call using :
var syncDataReceived = wait.forMethod(collection,'insert',{data:toinsert});
I have solved the problem by moving the database call into the loop body like this:
// Move the query into the loop body and capture its result before invoking
// the loop callback; the test function then reads the captured data.
var results = []
async.doWhilst(function(done) {
    //some code
    sequelize.query('some query').success(function(queryResult) {
        results = queryResult;
        done();
    });
}, function() {
    //use the results variable that is fetched from the database
    //return true to continue looping or false to stop here
}, function(err) {
    //do some things when the loop finishes
})
I want to track the number of documents I have within a collection in a node.js server
using the mongodb driver. I can insert, delete and update properly, but when I try to count, it works until I try to store that value, at which point it returns nothing.
Here is my code:
var db_collection = db.collection('collection');
// Question code: tries to return an asynchronously-obtained count synchronously.
var countCollections = function () {
    var response_count_collections = null;
    // NOTE(review): queries db_mensajes, not db_collection — looks like a paste slip; verify.
    db_mensajes.count(function(err,number_of_collections){
        if (err){
            console.log(err);
        } else {
            // NOTE(review): assigns response_of_collections (undeclared — an
            // implicit global), not response_count_collections, so the local
            // below is never updated.
            response_of_collections = number_of_collections;
            console.log('Number of collections: '+number_of_collections);
        }
    });
    // Runs before the count callback fires, so this always returns null.
    return response_count_collections;
};
It logs number_of_collections correctly, but it doesn't return the right value. In my example it returns null (the initial value of my var response_count_collections), and if I try to return number_of_collections; within the db_collection.count() callback like this:
var db_collection = db.collection('collection');
// Second attempt: returning from inside the callback instead.
var countCollections = function () {
    var response_count_collections = null;
    db_mensajes.count(function(err,number_of_collections){
        if (err){
            console.log(err);
        } else {
            console.log('Number of collections: '+number_of_collections);
            // This returns from the anonymous count callback, not from
            // countCollections — so the outer call still yields undefined.
            return number_of_collections;
        }
    });
};
It returns me "undefined". Is there any way to achieve what I want?
It's because it returns the variable before the function has completely executed.
If you want it to be asynchronous then you will have to learn to control the flow of the program using some node modules like async.
Update:
Have a look at this question, it shows how to return value from an async function using callback correctly.
Using mongoskin, I can do a query like this, which will return a cursor:
// Cursor iteration with mongoskin: each() fires once per document.
// The original snippet was missing its closing parentheses.
myCollection.find({}, function(err, resultCursor) {
    resultCursor.each(function(err, result) {
    });
});
However, I'd like to call some async functions for each document, and only move on to the next item on the cursor after this has called back (similar to the eachSeries structure in the async.js module). E.g:
// Desired flow: run an async function per document and only advance the
// cursor after it calls back (like async.eachSeries). Closers fixed — the
// original snippet did not close find() or each().
myCollection.find({}, function(err, resultCursor) {
    resultCursor.each(function(err, result) {
        externalAsyncFunction(result, function(err) {
            //externalAsyncFunction completed - now want to move to next doc
        });
    });
});
How could I do this?
Thanks
UPDATE:
I don't want to use toArray() as this is a large batch operation, and the results might not fit in memory in one go.
A more modern approach that uses async/await:
// Modern approach: await documents straight off the cursor, one at a time.
const resultCursor = db.collection("foo").find({});
let hasMore = await resultCursor.hasNext();
while (hasMore) {
    const doc = await resultCursor.next();
    // process doc here
    hasMore = await resultCursor.hasNext();
}
Notes:
This may be even more simple to do when async iterators arrive.
You'll probably want to add try/catch for error checking.
The containing function should be async or the code should be wrapped in (async function() { ... })() since it uses await.
If you want, add await new Promise(resolve => setTimeout(resolve, 1000)); (pause for 1 second) at the end of the while loop to show that it does process docs one after the other.
If you don't want to load all of the results into memory using toArray, you can iterate using the cursor with something like the following.
// Iterate with nextObject so only one document is in memory at a time; the
// cursor only advances after the async work for the current doc calls back.
// The original was missing the closing `);` of myCollection.find.
myCollection.find({}, function(err, resultCursor) {
    function processItem(err, item) {
        if(item === null) {
            return; // All done!
        }
        externalAsyncFunction(item, function(err) {
            // Fetch the next doc only once this one's async work completed.
            resultCursor.nextObject(processItem);
        });
    }
    resultCursor.nextObject(processItem);
});
since node.js v10.3 you can use async iterator
// node >= 10.3: cursors are async iterables, so for await...of just works.
const fooCursor = db.collection('foo').find({});
for await (const document of fooCursor) {
    // do your thing
    // you can even use `await myAsyncOperation()` here
}
Jake Archibald wrote a great blog post about async iterators, that I came to know after reading #user993683's answer.
This works with large dataset by using setImmediate:
var cursor = collection.find({filter...}).cursor();
cursor.nextObject(function fn(err, item) {
if (err || !item) return;
setImmediate(fnAction, item, arg1, arg2, function() {
cursor.nextObject(fn);
});
});
function fnAction(item, arg1, arg2, callback) {
// Here you can do whatever you want to do with your item.
return callback();
}
If someone is looking for a Promise way of doing this (as opposed to using callbacks of nextObject), here it is. I am using Node v4.2.2 and mongo driver v2.1.7. This is kind of an asyncSeries version of Cursor.forEach():
/**
 * Sequentially apply an async, promise-returning iterator to every document
 * of a cursor (an asyncSeries version of Cursor.forEach).
 *
 * @param {Object} cursor - object exposing next() -> Promise<doc|null>.
 * @param {Function} iterator - called with each doc; must return a Promise.
 * @returns {Promise<number>} resolves with the number of docs processed.
 *   Rejects if cursor.next() or the iterator rejects — the original declared
 *   `reject` but never used it, leaving the promise pending forever on error.
 */
function forEachSeries(cursor, iterator) {
    return new Promise(function(resolve, reject) {
        var count = 0;
        function processDoc(doc) {
            if (doc != null) {
                count++;
                // Chain so any rejection propagates to the outer .catch below.
                return iterator(doc).then(function() {
                    return cursor.next().then(processDoc);
                });
            } else {
                resolve(count); // null doc: cursor exhausted
            }
        }
        cursor.next().then(processDoc).catch(reject);
    });
}
To use this, pass the cursor and an iterator that operates on each document asynchronously (like you would for Cursor.forEach). The iterator needs to return a promise, like most mongodb native driver functions do.
Say, you want to update all documents in the collection test. This is how you would do it:
// Example: mark every document in `test` as updated, one at a time.
var theDb;
MongoClient.connect(dbUrl).then(function(db) {
    theDb = db; // save it, we'll need to close the connection when done.
    var testCursor = db.collection('test').find();
    // The iterator hands back updateOne's promise (no callback supplied), so
    // forEachSeries waits on each write before moving to the next document.
    return forEachSeries(testCursor, function(doc) {
        return db.collection('test').updateOne(
            {_id: doc._id},
            {$set: {updated: true}} // or whatever else you need to change
        );
    });
})
.then(function(count) {
    console.log("All Done. Processed", count, "records");
    theDb.close();
})
You can do something like this using the async lib. The key point here is to check if the current doc is null. If it is, it means you are finished.
// Drain the cursor into `array` with async.series. Key point: a null doc
// from each() means the cursor is exhausted. The series callback `cb` must
// fire exactly once, so guard it — each() keeps emitting after an error,
// and the original could invoke cb multiple times.
async.series([
    function (cb) {
        var finished = false;
        cursor.each(function (err, doc) {
            if (finished) return;
            if (err) {
                finished = true;
                cb(err);
            } else if (doc === null) {
                finished = true;
                cb();
            } else {
                console.log(doc);
                array.push(doc);
            }
        });
    }
], function (err) {
    callback(err, array);
});
You could use a Future:
// Fibers-based approach: block the current fiber on each nextObject call.
// The original had mismatched parentheses around the nextObject callback
// (a syntax error), fixed here.
myCollection.find({}, function(err, resultCursor) {
    resultCursor.count(Meteor.bindEnvironment(function(err, count) {
        for (var i = 0; i < count; i++) {
            var itemFuture = new Future();
            resultCursor.nextObject(function(err, item) {
                // NOTE(review): fibers' Future sets its value with .return();
                // the original called .result(item) — confirm against the
                // Future implementation actually in use.
                itemFuture.return(item);
            });
            var item = itemFuture.wait();
            //do what you want with the item,
            //and continue with the loop if so
        }
    }));
});
You can get the result in an Array and iterate using a recursive function, something like this.
// Load everything with toArray, then walk the array back-to-front, waiting
// for each async call before starting the next. Fixes in this version:
// the original indexed items[count] (one past the end, always undefined),
// never reached items[0], misspelled externalAsyncFunction, and crashed
// conceptually on an empty result set.
myCollection.find({}).toArray(function (err, items) {
    var count = items.length;
    var fn = function () {
        count -= 1; // step to the next unprocessed index before using it
        externalAsyncFunction(items[count], function () {
            if (count > 0) fn();
        });
    };
    if (count > 0) fn(); // nothing to do for an empty result set
});
Edit:
This is only applicable for small datasets, for larger one's you should use cursors as mentioned in other answers.
A more modern approach that uses for await:
// for await...of consumes the cursor as an async iterable, awaiting the
// per-document work before pulling the next one.
const fooCursor = db.collection("foo").find({});
for await (const document of fooCursor) {
    // process doc here with await
    await processDoc(document);
}
You could use simple setTimeOut's. This is an example in typescript running on nodejs (I am using promises via the 'when' module but it can be done without them as well):
// TypeScript example: iterate a cursor with promise-gated recursion, using
// setTimeout(..., 1) between documents to yield to the event loop.
import mongodb = require("mongodb");
var dbServer = new mongodb.Server('localhost', 27017, {auto_reconnect: true}, {});
var db = new mongodb.Db('myDb', dbServer);
var util = require('util');
var when = require('when'); //npm install when
var dbDefer = when.defer();
// Resolve the deferred once the connection is open.
db.open(function() {
    console.log('db opened...');
    dbDefer.resolve(db);
});
dbDefer.promise.then(function(db : mongodb.Db){
    db.collection('myCollection', function (error, dataCol){
        if(error) {
            console.error(error); return;
        }
        var doneReading = when.defer();
        // Simulates a variable-length async operation per record; returns a
        // promise that resolves when the simulated work finishes.
        var processOneRecordAsync = function(record) : When.Promise{
            var result = when.defer();
            setTimeout (function() {
                //simulate a variable-length operation
                console.log(util.inspect(record));
                result.resolve('record processed');
            }, Math.random()*5);
            return result.promise;
        }
        // Pulls one record, processes it, then schedules itself again;
        // resolves doneReading when the cursor yields no more records.
        var runCursor = function (cursor : MongoCursor){
            cursor.next(function(error : any, record : any){
                if (error){
                    console.log('an error occurred: ' + error);
                    return;
                }
                if (record){
                    processOneRecordAsync(record).then(function(r){
                        setTimeout(function() {runCursor(cursor)}, 1);
                    });
                }
                else{
                    //cursor up
                    doneReading.resolve('done reading data.');
                }
            });
        }
        dataCol.find({}, function(error, cursor : MongoCursor){
            if (!error)
            {
                setTimeout(function() {runCursor(cursor)}, 1);
            }
        });
        doneReading.promise.then(function(message : string){
            //message='done reading data'
            console.log(message);
        });
    });
});
I know Node.js runs asynchronously, so outer functions execute earlier than inner ones. But what is the way to access the notification array outside the for loop? I would like to access all the values in the array at once; is this feasible?
// Question code: six async calls launched from a synchronous loop.
var notification=[];
for(var j=0;j<6; j++)
{
    getNotification(response[j].sender_id,function(results) // a function called
    {
        // NOTE(review): with var, every callback closes over the same j;
        // by the time a callback runs, j has likely reached 6 — verify.
        notification[j] =results;
        console.log(notification); // output: correct
    });
}
// Executes before any callback has fired, so the array is still empty here.
console.log(notification); // output: [], need notification array values here
EDIT: If you don't want to use third party libs, this is how to do this in your own code.
/* jshint node:true*/
/**
 * Fetch a notification for every response sequentially, then hand the
 * collected results to the caller's callback.
 *
 * Fixes over the original: iterates by index instead of responses.pop(), so
 * the caller's array is no longer emptied (parameter mutation) and results
 * come back in input order rather than reversed; an empty input array now
 * calls back with [] instead of crashing on undefined.sender_id.
 *
 * @param {Array} responses - objects each carrying a sender_id.
 * @param {Function} callbackToMainProgramLogic - receives the results array.
 */
function getNotifications(responses, callbackToMainProgramLogic) {
    'use strict';
    var results = [];
    var index = 0;
    function getNotificationAsync() {
        getNotification(responses[index].sender_id, function (data) {
            results.push(data);
            index += 1;
            if (index < responses.length) {
                getNotificationAsync(); // still responses left: fetch the next one
            } else {
                callbackToMainProgramLogic(results); // done: return to main flow
            }
        });
    }
    if (responses.length === 0) {
        return callbackToMainProgramLogic(results); // nothing to fetch
    }
    getNotificationAsync();
}
getNotifications(someArrayOfResonses, function (dataFromNotifications) {
    console.log('The collected data: ' + JSON.stringify(dataFromNotifications, 0, 4));
});
If you absolutely must, you could do something ridiculous like this. Your logic in the loopUntilDatReceived would be waiting for array sizes, not waiting for a non-empty string, but the idea is similar, and you shouldn't be using this anyway! :)
// Demonstration of why polling for an async result is a bad idea: kick off a
// file read, then spin on the event loop until the data shows up.
var fileData = '';
fs.readFile('blah.js', function (err, data) { //Async operation, similar to your issue.
    'use strict';
    fileData = data;
    console.log('The Data: ' + data);
});

function loopUntilDataReceived() {
    'use strict';
    //A straight while loop would block the event loop, so we do this once per loop around the event loop.
    process.nextTick(function () {
        if (fileData !== '') {
            console.log('Finally: ' + fileData);
        } else {
            console.log('No Data Yet');
            loopUntilDataReceived();
        }
    });
}
loopUntilDataReceived();
Did I mention this is ridiculous? Honestly, this is an awful idea, but it may help you understand what is going on and how the Node event loop works, and why what you want is not possible. AND why the other posts about callbacks, and flow control libraries are the way to go.
First off, you're having a closure issue in your code (please see the details here)
Then, you simply can't have the array values just next to the loop, because the values are not ready at this point.
You need to wait until all 6 of your getNotification calls get resolved. You can do that with the async library. Something like:
// Build one task per notification and run them with async.parallel.
var notification = [];
// Factory: captures `index` per task, avoiding the shared-loop-variable bug.
function createRequest (index) {
    return function (callback) {
        getNotification(response[index].sender_id, function(results) {
            notification[index] = results;
            // async task callbacks are (err, result): passing results as the
            // first argument (as the original did) reports it as an error.
            callback(null, results);
        });
    }
}
var requests = [];
for(var j=0;j<6; j++) {
    requests.push(createRequest(j));
}
// Final callback signature is (err, results) — the original omitted err and
// logged `notifications`, an undeclared name.
async.parallel(requests, function (err, allResults) {
    if (err) { return console.log(err); }
    // notification array is ready at this point
    // the data is also available in the allResults array
    console.log(notification);
});
Send a callback to the notification loop like this:
var notification=[];
// Run the callback only after ALL notifications have arrived. The original
// invoked callback() synchronously right after the for loop — before any
// getNotification had completed — and also closed over the shared `j`.
getNotificationArray( function() {
    console.log(notification);
});
function getNotificationArray (callback)
{
    var pending = 6; // completions still outstanding
    for(var j=0;j<6; j++)
    {
        (function (index) { // freeze the index for this iteration
            getNotification(response[index].sender_id,function(results)
            {
                notification[index] =results;
                console.log(notification); // output: correct
                pending -= 1;
                if (pending === 0) {
                    callback(); // last one in: the array is complete
                }
            });
        })(j);
    }
}