res.send after two forEach have finished executing - node.js

const collect = [];
req.body.product.forEach(function(entry) {
  mongoClient.connect(databaseServerUrl, function(err, db) {
    let testCollection = db.collection('Tests');
    testCollection.find({Product: entry}).toArray((err, docs) => {
      let waiting = docs.length;
      docs.forEach(function (doc) {
        collect.push(doc);
        finish();
      });
      function finish() {
        waiting--;
        if (waiting === 0) {
          res.send(collect);
        }
      }
    });
    db.close();
  });
});
This is only getting back the first set of results. If I have two entries in my req.body.product array, for example, I only get back the first set, but I need everything back, not just the results for one product.

Rather than performing two queries and combining the results into one array, I suggest performing a single query that gets all of the results, which would look something like this:
mongoClient.connect(databaseServerUrl, function(err, db) {
  const query = { $or: req.body.product.map(Product => ({ Product })) };
  db.collection('Tests').find(query).toArray((err, docs) => {
    // ...handle `err` here...
    res.send(docs);
    db.close();
  });
});
Note that I haven't tested this since I don't have a MongoDB database in front of me.
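Since every clause in that $or tests the same Product field, an equivalent filter (also untested) is $in, which reads a little more directly:
const query = { Product: { $in: req.body.product } };
db.collection('Tests').find(query).toArray((err, docs) => {
  // ...handle `err` here...
  res.send(docs);
  db.close();
});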

Your mongoClient.connect() is asynchronous, but your loop just executes without waiting for the callback.
Try an asynchronous forEach loop instead.
This should solve your problem.
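One way to do that, sketched here untested and assuming the driver's promise API (no callback passed to toArray()), is to fire one query per product and only send the response once Promise.all resolves:
mongoClient.connect(databaseServerUrl, function (err, db) {
  if (err) return res.status(500).send(err);
  const testCollection = db.collection('Tests');

  // One promise per product; toArray() returns a promise when no callback is given.
  const queries = req.body.product.map(entry =>
    testCollection.find({ Product: entry }).toArray()
  );

  Promise.all(queries)
    .then(results => {
      // Flatten the per-product arrays into a single array before sending.
      res.send([].concat(...results));
    })
    .catch(queryErr => res.status(500).send(queryErr))
    .then(() => db.close());
});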

Related

How can I run a mongoose query in a forEach loop?

Can anyone help me run a mongoose query inside a forEach loop in Node.js? I also need something like an inner join, since the result needs data from both collections. Details below:
userSchema.find({}, function(err, users) {
  if (err) throw err;
  users.forEach(function(u, i) {
    var users = [];
    jobSchema.find({u_sno: s.u.sno}, function(err, j) {
      if (err) throw err;
      if (!u) {
        res.end(JSON.stringify({
          status: 'failed:Auction not found.',
          error_code: '404'
        }));
        console.log("User not found.");
        return
      }
      users.push(j);
    })
  })
  res.send(JSON.stringify({status: "success", message: "successfully done", data: {jobs: j, users: u}}));
})
Schema.find() is an async function, so your last line of code executes before the first job search in your loop has finished. I suggest switching to Promises and using Promise.all(array).
To do so, first make mongoose use Promises. You can do this with bluebird like this:
var mongoose = require('mongoose');
mongoose.Promise = require('bluebird');
Then you can use Promises instead of callbacks like this:
userSchema.find({}).then(function(users) {
  var jobQueries = [];
  users.forEach(function(u) {
    jobQueries.push(jobSchema.find({u_sno: u.sno}));
  });
  return Promise.all(jobQueries);
}).then(function(listOfJobs) {
  res.send(listOfJobs);
}).catch(function(error) {
  res.status(500).send('one of the queries failed', error);
});
EDIT: How to list both jobs and users
If you want to have a structure like:
[{
  user: { /* user object */ },
  jobs: [ /* jobs */ ]
}]
you could merge the lists together. listOfJobs is in the same order as the jobQueries list, so they are in the same order as the users. Save users to a shared scope to get access to the list in the 'then function' and then merge.
..
}).then(function(listOfJobs) {
  var results = [];
  for (var i = 0; i < listOfJobs.length; i++) {
    results.push({
      user: users[i],
      jobs: listOfJobs[i]
    });
  }
  res.send(results);
}).catch(function(error) {
  res.status(500).send('one of the queries failed', error);
});
A nice elegant solution is to use the cursor.eachAsync() function. Credit to https://thecodebarbarian.com/getting-started-with-async-iterators-in-node-js.
The eachAsync() function executes a (potentially async) function for each document that the cursor returns. If that function returns a promise, it will wait for that promise to resolve before getting the next document. This is the easiest way to exhaust a cursor in mongoose.
// A cursor has a `.next()` function that returns a promise. The promise
// will resolve to the next doc if there is one, or null if there are no
// more results.
const cursor = MyModel.find().sort({name: 1}).cursor();
let count = 0;
console.log(new Date());
await cursor.eachAsync(async function(doc) {
  // Wait 1 second before printing first doc, and 0.5 before printing 2nd
  await new Promise(resolve => setTimeout(() => resolve(), 1000 - 500 * (count++)));
  console.log(new Date(), doc);
});
There's no need to use forEach() here: it is synchronous, and firing asynchronous queries inside it without waiting for them will give you wrong results.
You can use the aggregation framework and use $lookup which performs a left outer join to another collection in the same database to filter in documents from the "joined" collection for processing.
So the same query can be done using a single aggregation pipeline as:
userSchema.aggregate([
  {
    "$lookup": {
      "from": "jobs", /* underlying collection for jobSchema */
      "localField": "sno",
      "foreignField": "u_sno",
      "as": "jobs"
    }
  }
]).exec(function(err, docs) {
  if (err) throw err;
  res.send(
    JSON.stringify({
      status: "success",
      message: "successfully done",
      data: docs
    })
  );
})
You can use this:
db.collection.find(query).forEach(function(err, doc) {
// ...
});

Make Node.js code synchronous in Mongoose while iterating

I am learning Node.js; because of its asynchronous nature I am facing an issue:
domain.User.find({userName: new RegExp(findtext, 'i')}).sort('-created').skip(skip).limit(limit)
  .exec(function(err, result) {
    for (var i = 0; i < result.length; i++) {
      console.log("result is ", result[i].id);
      var camera = null;
      domain.Cameras.count({"userId": result[i].id}, function (err, cameraCount) {
        if (result.length - 1 == i) {
          configurationHolder.ResponseUtil.responseHandler(res, result, "User List ", false, 200);
        }
      })
    }
  })
I want to use result inside the Cameras callback, but it is an empty array there; is there any way to get it?
Also, since this code is asynchronous, is it possible to make the whole function synchronous?
@jmingov is right. You should make use of the async module to execute parallel requests to get the counts for each user returned in the User.find query.
Here's a flow for demonstration:
var Async = require('async'); // At the top of your js file.

domain.User.find({userName: new RegExp(findtext, 'i')}).sort('-created').skip(skip).limit(limit)
  .exec(function(err, result) {
    var cameraCountFunctions = [];
    result.forEach(function(user) {
      if (user && user.id) {
        console.log("result is ", user.id);
        var camera = null; // What is this for?
        cameraCountFunctions.push(function(callback) {
          domain.Cameras.count({"userId": user.id}, function (err, cameraCount) {
            if (err) return callback(err);
            callback(null, cameraCount);
          });
        });
      }
    })

    Async.parallel(cameraCountFunctions, function (err, cameraCounts) {
      console.log(err, cameraCounts);
      // cameraCounts is an array with the counts for each user.
      // Evaluate and return the results here.
    });
  });
Always try to program asynchronously when working in Node.js; this is a must, or you'll end up with big performance problems.
Check this module: https://github.com/caolan/async it can help.
Here is the trouble in your code:
domain.Cameras.count({
  "userId": result[i].id
}, function(err, cameraCount) {
  // the fn() used in the callback has 'cameraCount' as argument so
  // mongoose will store the results there.
  if (cameraCount.length - 1 == i) { // here is the problem
    // result isnt there it should be named 'cameraCount'
    configurationHolder.ResponseUtil.responseHandler(res, cameraCount, "User List ", false, 200);
  }
});
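For completeness, here is a hedged, untested sketch of how the same callback-based code could be fixed without the async module: inside the exec callback, count the pending queries and only respond once the last one has returned.
var pending = result.length;
var cameraCounts = [];
result.forEach(function (user, i) {
  domain.Cameras.count({ "userId": user.id }, function (err, count) {
    cameraCounts[i] = count; // counts stay aligned with the users in `result`
    pending -= 1;
    if (pending === 0) {
      // The last count query has called back; now it is safe to respond.
      configurationHolder.ResponseUtil.responseHandler(res, result, "User List ", false, 200);
    }
  });
});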

How can I use a cursor.forEach() in MongoDB using Node.js?

I have a huge collection of documents in my DB and I'm wondering how can I run through all the documents and update them, each document with a different value.
The answer depends on the driver you're using. All MongoDB drivers I know have cursor.forEach() implemented one way or another.
Here are some examples:
node-mongodb-native
collection.find(query).forEach(function(doc) {
  // handle
}, function(err) {
  // done or error
});
mongojs
db.collection.find(query).forEach(function(err, doc) {
  // handle
});
monk
collection.find(query, { stream: true })
  .each(function(doc) {
    // handle doc
  })
  .error(function(err) {
    // handle error
  })
  .success(function() {
    // final callback
  });
mongoose
collection.find(query).stream()
  .on('data', function(doc) {
    // handle doc
  })
  .on('error', function(err) {
    // handle error
  })
  .on('end', function() {
    // final callback
  });
Updating documents inside of .forEach callback
The only problem with updating documents inside of .forEach callback is that you have no idea when all documents are updated.
To solve this problem you should use some asynchronous control flow solution. Here are some options:
async
promises (when.js, bluebird)
Here is an example of using async, using its queue feature:
var q = async.queue(function (doc, callback) {
  // code for your update
  collection.update({
    _id: doc._id
  }, {
    $set: {hi: 'there'}
  }, {
    w: 1
  }, callback);
}, Infinity);

var cursor = collection.find(query);
cursor.each(function(err, doc) {
  if (err) throw err;
  if (doc) q.push(doc); // dispatching doc to async.queue
});

q.drain = function() {
  if (cursor.isClosed()) {
    console.log('all items have been processed');
    db.close();
  }
}
Using the mongodb driver, and modern NodeJS with async/await, a good solution is to use next():
const collection = db.collection('things')

const cursor = collection.find({
  bla: 42 // find all things where bla is 42
});

let document;
while ((document = await cursor.next())) {
  await collection.findOneAndUpdate({
    _id: document._id
  }, {
    $set: {
      blu: 43
    }
  });
}
This results in only one document at a time being required in memory, as opposed to e.g. the accepted answer, where many documents get sucked into memory, before processing of the documents starts. In cases of "huge collections" (as per the question) this may be important.
If documents are large, this can be improved further by using a projection, so that only those fields of documents that are required are fetched from the database.
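For example, a small untested sketch of the code above: the update only needs _id, so everything else can be projected away before documents are pulled over the wire.
// Only fetch the _id field. This is the options form used by driver 3.x+;
// cursor.project({ _id: 1 }) on the returned cursor works as well.
const cursor = collection.find(
  { bla: 42 },
  { projection: { _id: 1 } }
);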
var MongoClient = require('mongodb').MongoClient,
    assert = require('assert');

MongoClient.connect('mongodb://localhost:27017/crunchbase', function(err, db) {
  assert.equal(err, null);
  console.log("Successfully connected to MongoDB.");

  var query = {
    "category_code": "biotech"
  };

  db.collection('companies').find(query).toArray(function(err, docs) {
    assert.equal(err, null);
    assert.notEqual(docs.length, 0);

    docs.forEach(function(doc) {
      console.log(doc.name + " is a " + doc.category_code + " company.");
    });

    db.close();
  });
});
Notice that the call to .toArray makes the application fetch the entire dataset.
var MongoClient = require('mongodb').MongoClient,
    assert = require('assert');

MongoClient.connect('mongodb://localhost:27017/crunchbase', function(err, db) {
  assert.equal(err, null);
  console.log("Successfully connected to MongoDB.");

  var query = {
    "category_code": "biotech"
  };

  var cursor = db.collection('companies').find(query);

  cursor.forEach(
    function(doc) {
      console.log(doc.name + " is a " + doc.category_code + " company.");
    },
    function(err) {
      assert.equal(err, null);
      return db.close();
    }
  );
});
Notice that the cursor returned by find() is assigned to var cursor. With this approach, instead of fetching all data into memory and consuming it at once, we stream the data to our application. find() can create a cursor immediately because it doesn't actually make a request to the database until we try to use some of the documents it will provide; the point of the cursor is to describe our query. The 2nd parameter to cursor.forEach shows what to do when the cursor is exhausted or an error occurs.
In the initial version of the above code, it was toArray() which forced the database call. It meant we needed ALL the documents and wanted them to be in an array.
Also, MongoDB returns data in batches: the driver requests the next batch from the server as the application works its way through the cursor.
forEach is better than toArray because we can process documents as they come in until we reach the end. Contrast it with toArray - where we wait for ALL the documents to be retrieved and the entire array is built. This means we're not getting any advantage from the fact that the driver and the database system are working together to batch results to your application. Batching is meant to provide efficiency in terms of memory overhead and the execution time. Take advantage of it, if you can in your application.
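If the default batch size doesn't suit the workload, it can be tuned on the cursor. A minimal untested sketch, reusing the query above:
// Ask the driver to pull 100 documents per round trip to the server.
var cursor = db.collection('companies').find(query).batchSize(100);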
None of the previous answers mentions batching the updates. That makes them extremely slow 🐌 - tens or hundreds of times slower than a solution using bulkWrite.
Let's say you want to double the value of a field in each document. Here's how to do that fast 💨 and with fixed memory consumption:
// Double the value of the 'foo' field in all documents
let bulkWrites = [];
const bulkDocumentsSize = 100; // how many documents to write at once
let i = 0;
db.collection.find({ ... }).forEach(doc => {
  i++;

  // Update the document...
  doc.foo = doc.foo * 2;

  // Add the update to an array of bulk operations to execute later
  bulkWrites.push({
    replaceOne: {
      filter: { _id: doc._id },
      replacement: doc,
    },
  });

  // Update the documents and log progress every `bulkDocumentsSize` documents
  if (i % bulkDocumentsSize === 0) {
    db.collection.bulkWrite(bulkWrites);
    bulkWrites = [];
    print(`Updated ${i} documents`);
  }
});
// Flush the last <100 bulk writes
db.collection.bulkWrite(bulkWrites);
And here is an example of consuming a Mongoose cursor asynchronously with promises:
new Promise(function (resolve, reject) {
  collection.find(query).cursor()
    .on('data', function(doc) {
      // ...
    })
    .on('error', reject)
    .on('end', resolve);
})
.then(function () {
  // ...
});
Reference:
Mongoose cursors
Streams and promises
Leonid's answer is great, but I want to reinforce the importance of using async/promises and to give a different solution with a promises example.
The simplest solution to this problem is to loop over each document with forEach and call an update. Usually you don't need to close the db connection after each request, but if you do need to close it, be careful: you must only close it once you are sure that all updates have finished executing.
A common mistake here is to call db.close() after all updates are dispatched without knowing if they have completed. If you do that, you'll get errors.
Wrong implementation:
collection.find(query).each(function(err, doc) {
if (err) throw err;
if (doc) {
collection.update(query, update, function(err, updated) {
// handle
});
}
else {
db.close(); // if there is any pending update, it will throw an error there
}
});
However, as db.close() is also an async operation (its signature has a callback option), you may get lucky and this code may finish without errors. It may work only when you need to update just a few docs in a small collection (so, don't try it).
Correct solution:
As a solution with async was already proposed by Leonid, below follows a solution using Q promises.
var Q = require('q');
var client = require('mongodb').MongoClient;
var url = 'mongodb://localhost:27017/test';

client.connect(url, function(err, db) {
  if (err) throw err;

  var promises = [];
  var query = {}; // select all docs
  var collection = db.collection('demo');
  var cursor = collection.find(query);

  // read all docs
  cursor.each(function(err, doc) {
    if (err) throw err;

    if (doc) {
      // create a promise to update the doc
      var query = doc;
      var update = { $set: {hi: 'there'} };

      var promise =
        Q.npost(collection, 'update', [query, update])
          .then(function(updated) {
            console.log('Updated: ' + updated);
          });

      promises.push(promise);
    } else {
      // close the connection after executing all promises
      Q.all(promises)
        .then(function() {
          if (cursor.isClosed()) {
            console.log('all items have been processed');
            db.close();
          }
        })
        .fail(console.error);
    }
  });
});
node-mongodb-native now supports an endCallback parameter to cursor.forEach, so you can handle the moment AFTER the whole iteration; refer to the official documentation for details: http://mongodb.github.io/node-mongodb-native/2.2/api/Cursor.html#forEach.
Also note that .each is deprecated in the nodejs native driver now.
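As a small hedged sketch of that two-callback form (the same shape as the node-mongodb-native example earlier in this thread):
collection.find(query).forEach(
  function (doc) {
    // handle each document here
  },
  function (err) {
    // called once, after the whole iteration (or when an error occurs)
    db.close();
  }
);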
You can now use (in an async function, of course):
for await (let doc of collection.find(query)) {
  await updateDoc(doc);
}
// all done
which nicely serializes all updates.
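The loop has to live inside an async function; here is a minimal sketch of such a wrapper (updateDoc is the caller-supplied async update from the snippet above):
async function updateAll(collection, query) {
  for await (let doc of collection.find(query)) {
    await updateDoc(doc); // awaited one at a time, so updates run strictly in sequence
  }
}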
Let's assume that we have the below MongoDB data in place.
Database name: users
Collection name: jobs
===========================
Documents
{ "_id" : ObjectId("1"), "job" : "Security", "name" : "Jack", "age" : 35 }
{ "_id" : ObjectId("2"), "job" : "Development", "name" : "Tito" }
{ "_id" : ObjectId("3"), "job" : "Design", "name" : "Ben", "age" : 45}
{ "_id" : ObjectId("4"), "job" : "Programming", "name" : "John", "age" : 25 }
{ "_id" : ObjectId("5"), "job" : "IT", "name" : "ricko", "age" : 45 }
==========================
This code:
var MongoClient = require('mongodb').MongoClient;
var dbURL = 'mongodb://localhost/users';

MongoClient.connect(dbURL, (err, db) => {
  if (err) {
    throw err;
  } else {
    console.log('Connection successful');
    var dataBase = db.db();

    // loop forEach
    dataBase.collection('jobs').find().forEach(function(myDoc) {
      console.log('There is a job called: ' + myDoc.job + ' in Database');
    });
  }
});
I looked for a solution with good performance and ended up creating a mix of what I found, which I think works well:
/**
 * This method will read the documents from the cursor in batches and invoke the callback
 * for each batch in parallel.
 * IT IS VERY RECOMMENDED TO CREATE THE CURSOR WITH A batchSize OPTION THAT MATCHES
 * THE VALUE OF batchSize. This way the performance benefits are maxed out, since
 * the mongo instance will send into our process memory the same number of documents
 * that we handle concurrently each time, so no memory space is wasted
 * and also the memory usage is limited.
 *
 * Example of usage:
 * const cursor = await collection.aggregate([
 *     {...}, ...],
 *     {
 *         cursor: {batchSize: BATCH_SIZE} // Limiting memory use
 *     });
 * DbUtil.concurrentCursorBatchProcessing(cursor, BATCH_SIZE, async (doc) => ...)
 *
 * @param cursor - A cursor to batch process on.
 *   We can get this from our collection.js API by either using aggregateCursor/findCursor
 * @param batchSize - The batch size, should match the batchSize of the cursor option.
 * @param callback - Callback that should be async, will be called in parallel for each batch.
 * @return {Promise<void>}
 */
static async concurrentCursorBatchProcessing(cursor, batchSize, callback) {
  let doc;
  const docsBatch = [];

  while ((doc = await cursor.next())) {
    docsBatch.push(doc);

    if (docsBatch.length >= batchSize) {
      await PromiseUtils.concurrentPromiseAll(docsBatch, async (currDoc) => {
        return callback(currDoc);
      });

      // Emptying the batch array
      docsBatch.splice(0, docsBatch.length);
    }
  }

  // Checking if there is a last batch remaining since it was smaller than batchSize
  if (docsBatch.length > 0) {
    await PromiseUtils.concurrentPromiseAll(docsBatch, async (currDoc) => {
      return callback(currDoc);
    });
  }
}
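PromiseUtils.concurrentPromiseAll is not shown in the original; assuming it simply runs the callback over the whole batch concurrently, a minimal sketch of it could be as small as this:
class PromiseUtils {
  // Hypothetical helper: runs asyncCallback for every item in the batch at once
  // and resolves when all of them have finished.
  static concurrentPromiseAll(items, asyncCallback) {
    return Promise.all(items.map((item) => asyncCallback(item)));
  }
}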
An example of usage for reading many big documents and updating them:
const cursor = await collection.aggregate([
  {
    ...
  }
], {
  cursor: {batchSize: BATCH_SIZE}, // Limiting memory use
  allowDiskUse: true
});

const bulkUpdates = [];

await DbUtil.concurrentCursorBatchProcessing(cursor, BATCH_SIZE, async (doc: any) => {
  const update: any = {
    updateOne: {
      filter: {
        ...
      },
      update: {
        ...
      }
    }
  };

  bulkUpdates.push(update);

  // Updating if we read too many docs, to clear space in memory
  await this.bulkWriteIfNeeded(bulkUpdates, collection);
});

// Making sure we updated everything
await this.bulkWriteIfNeeded(bulkUpdates, collection, true);

...

private async bulkWriteIfNeeded(
    bulkUpdates: any[], collection: any,
    forceUpdate = false, flushBatchSize = BATCH_SIZE) {
  if (bulkUpdates.length >= flushBatchSize || forceUpdate) {
    // concurrentPromiseChunked is a method that loops over an array in a concurrent way using lodash.chunk and Promise.map
    await PromiseUtils.concurrentPromiseChunked(bulkUpdates, (updatesChunk: any) => {
      return collection.bulkWrite(updatesChunk);
    });

    // Emptying the array
    bulkUpdates.splice(0, bulkUpdates.length);
  }
}

MongoDB find returning zero results

I know this will be something small I'm missing, but would appreciate the help.
Here is my test script (node.js)
var MongoClient = require('mongodb').MongoClient;

MongoClient.connect('mongodb://localhost:27017/myTestDB',
  function (err, db) {
    if (err)
      debugger;
    else {
      db.collection('test', function (err, collection) {
        collection.save({ name: "danny" }, function () { debugger; });
        collection.find(function (err, results) {
          if (results.items.length == 0) {
            ///======> always = 0 !!!! WHY?!!!!
            debugger;
          }
        });
      });
    }
    db.close();
  });
feel free to start your answer with "duh!"
UPDATE: you also need to move your db.close(); call inside the find callback or you're closing the connection before you're done with it.
In your example, results is a cursor, not an array of docs, so you need to call toArray on it to iterate over the cursor and get the array of docs. But you also need to put your find call inside the save callback. Otherwise the find is executing before the save has completed.
So something like this instead:
collection.save({ name: "danny" }, function () {
collection.find().toArray(function (err, results) {
// results contains the array of docs
// Now you can close the connection.
db.close();
});
});

Iterating over a mongodb cursor serially (waiting for callbacks before moving to next document)

Using mongoskin, I can do a query like this, which will return a cursor:
myCollection.find({}, function(err, resultCursor) {
  resultCursor.each(function(err, result) {
    // ...
  });
});
However, I'd like to call some async functions for each document, and only move on to the next item on the cursor after this has called back (similar to the eachSeries structure in the async.js module). E.g:
myCollection.find({}, function(err, resultCursor) {
  resultCursor.each(function(err, result) {
    externalAsyncFunction(result, function(err) {
      // externalAsyncFunction completed - now want to move to next doc
    });
  });
});
How could I do this?
Thanks
UPDATE:
I don't want to use toArray() as this is a large batch operation, and the results might not fit in memory in one go.
A more modern approach that uses async/await:
const cursor = db.collection("foo").find({});
while(await cursor.hasNext()) {
const doc = await cursor.next();
// process doc here
}
Notes:
This may be even more simple to do when async iterators arrive.
You'll probably want to add try/catch for error checking.
The containing function should be async or the code should be wrapped in (async function() { ... })() since it uses await; see the sketch after these notes.
If you want, add await new Promise(resolve => setTimeout(resolve, 1000)); (pause for 1 second) at the end of the while loop to show that it does process docs one after the other.
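Putting the notes together, a small untested sketch of the wrapped form with basic error handling looks like this:
(async function () {
  const cursor = db.collection("foo").find({});
  try {
    while (await cursor.hasNext()) {
      const doc = await cursor.next();
      // process doc here
    }
  } catch (err) {
    // handle a failed hasNext()/next() or a failure while processing a doc
    console.error(err);
  }
})();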
If you don't want to load all of the results into memory using toArray, you can iterate using the cursor with something like the following.
myCollection.find({}, function(err, resultCursor) {
  function processItem(err, item) {
    if (item === null) {
      return; // All done!
    }
    externalAsyncFunction(item, function(err) {
      resultCursor.nextObject(processItem);
    });
  }

  resultCursor.nextObject(processItem);
});
Since Node.js v10.3 you can use an async iterator:
const cursor = db.collection('foo').find({});

for await (const doc of cursor) {
  // do your thing
  // you can even use `await myAsyncOperation()` here
}
Jake Archibald wrote a great blog post about async iterators, which I came across after reading @user993683's answer.
This works with large dataset by using setImmediate:
var cursor = collection.find({filter...}).cursor();

cursor.nextObject(function fn(err, item) {
  if (err || !item) return;

  setImmediate(fnAction, item, arg1, arg2, function() {
    cursor.nextObject(fn);
  });
});

function fnAction(item, arg1, arg2, callback) {
  // Here you can do whatever you want to do with your item.
  return callback();
}
If someone is looking for a Promise way of doing this (as opposed to using callbacks of nextObject), here it is. I am using Node v4.2.2 and mongo driver v2.1.7. This is kind of an asyncSeries version of Cursor.forEach():
function forEachSeries(cursor, iterator) {
  return new Promise(function(resolve, reject) {
    var count = 0;

    function processDoc(doc) {
      if (doc != null) {
        count++;
        return iterator(doc).then(function() {
          return cursor.next().then(processDoc);
        });
      } else {
        resolve(count);
      }
    }

    cursor.next().then(processDoc);
  });
}
To use this, pass the cursor and an iterator that operates on each document asynchronously (like you would for Cursor.forEach). The iterator needs to return a promise, like most mongodb native driver functions do.
Say, you want to update all documents in the collection test. This is how you would do it:
var theDb;

MongoClient.connect(dbUrl).then(function(db) {
  theDb = db; // save it, we'll need to close the connection when done.
  var cur = db.collection('test').find();

  return forEachSeries(cur, function(doc) { // this is the iterator
    return db.collection('test').updateOne(
      {_id: doc._id},
      {$set: {updated: true}} // or whatever else you need to change
    );
    // updateOne returns a promise, if not supplied a callback. Just return it.
  });
})
.then(function(count) {
  console.log("All Done. Processed", count, "records");
  theDb.close();
})
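A hedged addition (not in the original answer): a trailing .catch keeps a failed connection or update from going unnoticed and still closes the connection.
.catch(function(err) {
  // Any rejection from connect(), the cursor, or an updateOne lands here.
  console.error(err);
  if (theDb) theDb.close();
});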
You can do something like this using the async lib. The key point here is to check if the current doc is null. If it is, it means you are finished.
async.series([
  function (cb) {
    cursor.each(function (err, doc) {
      if (err) {
        cb(err);
      } else if (doc === null) {
        cb();
      } else {
        console.log(doc);
        array.push(doc);
      }
    });
  }
], function (err) {
  callback(err, array);
});
You could use a Future:
myCollection.find({}, function(err, resultCursor) {
  resultCursor.count(Meteor.bindEnvironment(function(err, count) {
    for (var i = 0; i < count; i++) {
      var itemFuture = new Future();

      resultCursor.nextObject(function(err, item) {
        itemFuture.return(item);
      });

      var item = itemFuture.wait();
      // do what you want with the item,
      // and continue with the loop if so
    }
  }));
});
You can get the result in an Array and iterate using a recursive function, something like this.
myCollection.find({}).toArray(function (err, items) {
  var count = items.length;

  var fn = function () {
    externalAsyncFunction(items[count - 1], function () {
      count -= 1;
      if (count) fn();
    });
  };

  fn();
});
Edit:
This is only applicable for small datasets; for larger ones you should use cursors, as mentioned in other answers.
A more modern approach that uses for await:
const cursor = db.collection("foo").find({});
for await(const doc of cursor) {
// process doc here with await
await processDoc(doc);
}
You could use simple setTimeout calls. This is an example in TypeScript running on Node.js (I am using promises via the 'when' module, but it can be done without them as well):
import mongodb = require("mongodb");
var dbServer = new mongodb.Server('localhost', 27017, {auto_reconnect: true}, {});
var db = new mongodb.Db('myDb', dbServer);
var util = require('util');
var when = require('when'); //npm install when
var dbDefer = when.defer();
db.open(function() {
console.log('db opened...');
dbDefer.resolve(db);
});
dbDefer.promise.then(function(db : mongodb.Db){
db.collection('myCollection', function (error, dataCol){
if(error) {
console.error(error); return;
}
var doneReading = when.defer();
var processOneRecordAsync = function(record) : When.Promise{
var result = when.defer();
setTimeout (function() {
//simulate a variable-length operation
console.log(util.inspect(record));
result.resolve('record processed');
}, Math.random()*5);
return result.promise;
}
var runCursor = function (cursor : MongoCursor){
cursor.next(function(error : any, record : any){
if (error){
console.log('an error occurred: ' + error);
return;
}
if (record){
processOneRecordAsync(record).then(function(r){
setTimeout(function() {runCursor(cursor)}, 1);
});
}
else{
//cursor up
doneReading.resolve('done reading data.');
}
});
}
dataCol.find({}, function(error, cursor : MongoCursor){
if (!error)
{
setTimeout(function() {runCursor(cursor)}, 1);
}
});
doneReading.promise.then(function(message : string){
//message='done reading data'
console.log(message);
});
});
});

Resources