mongoose .create statement not saving inside async.each loop - node.js

Following on from my question last week, I got the async loop working, but I have run into a new issue. It appears the documents being created by the loop are not being saved to the db. They are, however, being pushed into the parent record (plan, in the code below), and that push is saving successfully. I've included the full route code below:
// Auto-populate entries
router.post( "/populate", middlewareObj.isLoggedIn, function(req, res){
var orgList=[]
console.log( req.body.orgs);
if (typeof req.body.orgs == 'string') {
orgList = [req.body.orgs]
} else {
orgList = req.body.orgs
};
orgList.save
console.log( orgList)
//lookup Plan using ID
Plan.findById( req.params.id, function(err, foundPlan){
if(err){
console.log( err);
res.redirect("/plans");
} else {
BaseData.find({
"contributingRegion": foundPlan.contributingRegion,
"org": { $in: orgList}
}, function( err, baseData) {
if (err) {
console.log( err)
} else {
console.log( baseData.length);
//Create entries & push into plan
async.each( baseData, function(data, next) {
Entry.create(data, function(err, entry) { /*Create entry*/
if(err) {
next (err);
} else {
// Create other entry variables
entry.author.id = req.user._id;
entry.author.username = req.user.username;
entry.save();
entry.adjHC = entry.initHC;
entry.adjResRate = entry.initResRate;
entry.yr1Resignations = Math.round(entry.adjHC * entry.adjResRate);
entry.yr1Supply = entry.adjHC - entry.yr1Resignations;
entry.yr2Resignations = Math.round(entry.yr1Supply * entry.adjResRate);
entry.yr2Supply = entry.yr1Supply - entry.yr2Resignations;
entry.yr3Resignations = Math.round(entry.yr2Supply * entry.adjResRate);
entry.yr3Supply = entry.yr2Supply - entry.yr3Resignations;
entry.yr4Resignations = Math.round(entry.yr3Supply * entry.adjResRate);
entry.yr4Supply = entry.yr3Supply - entry.yr4Resignations;
entry.yr5Resignations = Math.round(entry.yr4Supply * entry.adjResRate);
entry.yr5Supply = entry.yr4Supply- entry.yr5Resignations;
entry.demandYr0 = entry.adjHC;
entry.demandYr1 = entry.adjHC;
entry.demandYr2 = entry.adjHC;
entry.demandYr3 = entry.adjHC;
entry.demandYr4 = entry.adjHC;
entry.demandYr5 = entry.adjHC;
entry.gapYr1 = entry.yr1Supply - entry.demandYr1;
entry.gapYr2 = entry.yr2Supply - entry.demandYr2;
entry.gapYr3 = entry.yr3Supply - entry.demandYr3;
entry.gapYr4 = entry.yr4Supply - entry.demandYr4;
entry.gapYr5 = entry.yr5Supply - entry.demandYr5;
entry.save();
console.log(entry._id + ' saved')
foundPlan.planEntries.push(entry);
foundPlan.save();
// next iteration
next();
}
}); /*entry Create*/
}, function(err) {
// This function runs when all iterations are done
if (err) throw err;
res.redirect('/plans/' + foundPlan._id);
} ); /*End of Async Loop*/
};
}); /*End of BaseDate.find*/
}
}); /*End of Plan.findById*/
});
From the various console.logs I've been using to troubleshoot, I know the code is finding the right number of entries in baseData (42 records).
I should add that the same Entry.create code is used in a separate route where entries are added manually, one by one, by the user.
When I look in MongoDB at the size of the planEntries array in the plan, I get:
// 1) Manually added entry
{ "_id" : ObjectId("5a6f00421046d90019e2c6e8"), "name" : "test", "numEntries" : 1 }
// 2) After First populate route
{ "_id" : ObjectId("5a6f00421046d90019e2c6e8"), "name" : "test", "numEntries" : 46 }
// 3) After a second populate route (same selections)
{ "_id" : ObjectId("5a6f00421046d90019e2c6e8"), "name" : "test", "numEntries" : 239 }
The manually added entry saves correctly, but the auto-populated ones don't seem to be saving their documents.
What is also unexpected: after each auto-populate I would expect numEntries to increase by 42, but it grows by a much larger number.
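Output like the above can be produced with a $size projection in the mongo shell (a sketch; the plans collection name is an assumption):
// Sketch: report each plan's name and the size of its planEntries array
db.plans.aggregate([
    { $project: { name: 1, numEntries: { $size: "$planEntries" } } }
])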

So I managed to work it out. There were two things going on.
Firstly, Entry.create creates the object but, for some reason, it wasn't being saved to the DB. If I swap my code to use var entry = new Entry(), the entries are saved to the database.
Once I got this working, I ran into an issue of each entry being pushed into the plan multiple times (the same number of times as there were entries to add, so I ended up with 42 * 42 entries). This was resolved by clearing out the entry object before the next() function is called. I also switched to async.eachSeries; I will test async.each later and see if that works.
UPDATE: async.each reverts back to the duplication of results; it appears async.eachSeries is needed to prevent this duplication.
The complete final code, for completeness:
// Auto-populate entries
router.post("/populate", middlewareObj.isLoggedIn, function(req, res) {
    // Ensure req.body.orgs is an array
    var orgList = [];
    if (typeof req.body.orgs == 'string') {
        orgList = [req.body.orgs];
    } else {
        orgList = req.body.orgs;
    }
    // lookup Plan using ID
    Plan.findById(req.params.id, function(err, foundPlan) {
        if (err) {
            console.log(err);
            res.redirect("/plans");
        } else {
            BaseData.find({
                "contributingRegion": foundPlan.contributingRegion,
                "org": { $in: orgList }
            }, function(err, baseData) {
                if (err) {
                    console.log(err);
                } else {
                    // console.log(baseData.length);
                    // Loop through baseData to create entries & push into plan
                    async.eachSeries(baseData, function(data, next) {
                        console.log(data);
                        // Create new entry record
                        var entry = new Entry({
                            author: {
                                id: req.user._id,
                                username: req.user.username
                            },
                            org: data.org,
                            jobFamily: data.jobFamily,
                            globalGrade: data.globalGrade,
                            location: data.location,
                            initHC: data.initHC,
                            initResRate: data.initResRate.toFixed(3)
                        });
                        // Derive calculated variables for entry
                        entry.adjHC = entry.initHC;
                        entry.adjResRate = entry.initResRate;
                        entry.yr1Resignations = Math.round(entry.adjHC * entry.adjResRate);
                        entry.yr1Supply = entry.adjHC - entry.yr1Resignations;
                        entry.yr2Resignations = Math.round(entry.yr1Supply * entry.adjResRate);
                        entry.yr2Supply = entry.yr1Supply - entry.yr2Resignations;
                        entry.yr3Resignations = Math.round(entry.yr2Supply * entry.adjResRate);
                        entry.yr3Supply = entry.yr2Supply - entry.yr3Resignations;
                        entry.yr4Resignations = Math.round(entry.yr3Supply * entry.adjResRate);
                        entry.yr4Supply = entry.yr3Supply - entry.yr4Resignations;
                        entry.yr5Resignations = Math.round(entry.yr4Supply * entry.adjResRate);
                        entry.yr5Supply = entry.yr4Supply - entry.yr5Resignations;
                        entry.demandYr0 = entry.adjHC;
                        entry.demandYr1 = entry.adjHC;
                        entry.demandYr2 = entry.adjHC;
                        entry.demandYr3 = entry.adjHC;
                        entry.demandYr4 = entry.adjHC;
                        entry.demandYr5 = entry.adjHC;
                        entry.gapYr1 = entry.yr1Supply - entry.demandYr1;
                        entry.gapYr2 = entry.yr2Supply - entry.demandYr2;
                        entry.gapYr3 = entry.yr3Supply - entry.demandYr3;
                        entry.gapYr4 = entry.yr4Supply - entry.demandYr4;
                        entry.gapYr5 = entry.yr5Supply - entry.demandYr5;
                        entry.save();
                        // Push entry into plan
                        foundPlan.planEntries.push(entry);
                        foundPlan.save(function(err) {
                            if (err) {
                                console.log(err);
                            } else {
                                entry = {};
                                // next iteration
                                next();
                            }
                        });
                    }, function(err) {
                        // This function runs when all iterations are done
                        if (err) throw err;
                        res.redirect('/plans/' + foundPlan._id);
                    }); /* end of async loop */
                }
            }); /* end of BaseData.find */
        }
    }); /* end of Plan.findById */
});
// =====================================================
// ======UPDATE ENTRIES FROM SUPPLY/DEMAND SCREENS======
// =====================================================
// Entry Update (from Supply Screen)
router.put("/:entryId", middlewareObj.isLoggedIn, function(req, res) {
    // console.log(req.body)
    var adjHC = req.body.entry.adjHC;
    var adjResRate = req.body.entry.adjResRate / 100;
    var yr1Resignations = Math.round(adjHC * adjResRate);
    var yr1Supply = adjHC - yr1Resignations;
    var yr2Resignations = Math.round(yr1Supply * adjResRate);
    var yr2Supply = yr1Supply - yr2Resignations;
    var yr3Resignations = Math.round(yr2Supply * adjResRate);
    var yr3Supply = yr2Supply - yr3Resignations;
    var yr4Resignations = Math.round(yr3Supply * adjResRate);
    var yr4Supply = yr3Supply - yr4Resignations;
    var yr5Resignations = Math.round(yr4Supply * adjResRate);
    var yr5Supply = yr4Supply - yr5Resignations;
    var demandYr0 = adjHC;
    var demandYr1 = adjHC;
    var demandYr2 = adjHC;
    var demandYr3 = adjHC;
    var demandYr4 = adjHC;
    var demandYr5 = adjHC;
    var gapYr1 = yr1Supply - demandYr1;
    var gapYr2 = yr2Supply - demandYr2;
    var gapYr3 = yr3Supply - demandYr3;
    var gapYr4 = yr4Supply - demandYr4;
    var gapYr5 = yr5Supply - demandYr5;
    var updatedEntry = {
        adjHC: adjHC,
        adjResRate: adjResRate,
        yr1Resignations: yr1Resignations,
        yr2Resignations: yr2Resignations,
        yr3Resignations: yr3Resignations,
        yr4Resignations: yr4Resignations,
        yr5Resignations: yr5Resignations,
        yr1Supply: yr1Supply,
        yr2Supply: yr2Supply,
        yr3Supply: yr3Supply,
        yr4Supply: yr4Supply,
        yr5Supply: yr5Supply,
        demandYr0: demandYr0,
        demandYr1: demandYr1,
        demandYr2: demandYr2,
        demandYr3: demandYr3,
        demandYr4: demandYr4,
        demandYr5: demandYr5,
        gapYr1: gapYr1,
        gapYr2: gapYr2,
        gapYr3: gapYr3,
        gapYr4: gapYr4,
        gapYr5: gapYr5
    };
    // lookup Entry using ID
    Entry.findByIdAndUpdate(req.params.entryId, updatedEntry, { new: true }, function(err, foundEntry) {
        if (err) {
            console.log(err);
            res.redirect("/plans");
        } else {
            res.json(foundEntry);
        }
    });
});
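For what it's worth, here is an alternative sketch (assuming the same Entry and Plan models as above) that sidesteps the duplication problem entirely: it builds all the entries in memory, persists them with a single insertMany, and saves the plan once rather than on every iteration, so no async library is needed:
// Sketch only: would replace the async.eachSeries block above
var entries = baseData.map(function(data) {
    var entry = new Entry({
        author: { id: req.user._id, username: req.user.username },
        org: data.org,
        jobFamily: data.jobFamily,
        globalGrade: data.globalGrade,
        location: data.location,
        initHC: data.initHC,
        initResRate: data.initResRate.toFixed(3)
    });
    // ...derive the calculated yrN/demand/gap fields exactly as above...
    return entry;
});
Entry.insertMany(entries, function(err, saved) {
    if (err) { console.log(err); return res.redirect("/plans"); }
    saved.forEach(function(e) { foundPlan.planEntries.push(e); });
    foundPlan.save(function(err) {
        if (err) { console.log(err); }
        res.redirect('/plans/' + foundPlan._id);
    });
});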

Related

Edit a JSON object

I retrieved a JSON object from a local database. I want to edit a value (invItems) and add a new value to it (filed[filed.invItems]), then upload it back to the database, but it does not seem to work (the JSON does not seem to change).
async function invPut(itemID, message) {
var filed = await frenzyDB.getKey(id + "_invcache");
console.log("Before: " + filed)
newInvItems = filed.invItems + 1;
filed.invItems = newInvItems;
filed[filed.invItems] = itemID;
console.log("After: " + filed);
await frenzyDB.addKey(id + "_invcache", filed)
}
Console Output:
Before: {"invItems":0}
After: {"invItems":0}
It shows no errors, but the JSON doesn't change. Am I doing something wrong? If so, what can I do to fix it?
Thanks for all your help!
Notes:
frenzyDB is just a JavaScript file that wraps the standard REPL.it Database
Code of frenzyDB:
const Database = require("@replit/database")
const db = new Database()
async function addKey(key, value) {
    await db.set(key, value).then(() => { return; });
}

async function getKey(key) {
    return await db.get(key).then(value => { return value; });
}

function listAllKeys() {
    // return the promise so that callers (like hasKey) can await the result
    return db.list().then(keys => { return keys; });
}

async function hasKey(key) {
    var keys = await listAllKeys();
    if (keys.includes(key)) {
        return true;
    } else {
        return false;
    }
}

async function removeKey(key) {
    await db.delete(key).then(() => { return; });
}

module.exports = {
    addKey,
    getKey,
    listAllKeys,
    hasKey,
    removeKey
};
Edit: Latest code:
async function invPut(itemID, message) {
await init(message.author.id);
var filed = await frenzyDB.getKey(message.author.id + "_invcache");
console.log(filed)
const result = {};
result.invItems = (filed['invItems'] + 1) || 1;
result.hasOwnProperty(filed.invItems) ? result[filed.invItems + 1] = itemID : result[filed.invItems] = itemID;
console.log(result);
frenzyDB.addKey(message.author.id + "_invcache", result)
message.reply("A **"+ itemIDs[itemID].name + "** was placed in your inventory");
return true;
}
EDIT 2: Latest Console Output:
{ '4': 3, invItems: 5 }
{ '5': 3, invItems: 6 }
Any help will be appreciated!
Thanks
Try this
// Demo Data
const itemID = 10;
var filed = { "invItems" : 0 };
// Real function
console.log("Before: " + JSON.stringify(filed));
const result = {};
result.invItems = (filed['invItems'] + 1) || 1;
result.hasOwnProperty(filed.invItems) ? result[filed.invItems + 1] = itemID : result[filed.invItems] = itemID;
console.log("After: " + JSON.stringify(result));
The result I get is
Before: {"invItems":0}
After: {"0":10,"invItems":1}
You would then of course use result to store the data away in the DB.
async function invPut(itemID, message) {
// Typo?
var filed = await frenzyDB.getKey(itemID + "_invcache");
console.log("Before: " + filed)
const result = {};
result.invItems = (filed['invItems'] + 1) || 1;
result.hasOwnProperty(filed.invItems) ? result[filed.invItems + 1] = itemID : result[filed.invItems] = itemID;
console.log("After: " + result);
// Typo?
await frenzyDB.addKey(itemID + "_invcache", result)
}
Answer Edit:
const result = { ...filed };
result.invItems = (filed['invItems'] + 1) || 1;
result.hasOwnProperty(filed.invItems) ? result[filed.invItems + 1] = itemID : result[filed.invItems] = itemID;
console.log(JSON.stringify(result));
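Folding that edit back into the question's invPut might look like this (a sketch; the key shape and logic are taken from the question's latest code):
async function invPut(itemID, message) {
    const filed = await frenzyDB.getKey(message.author.id + "_invcache");
    const result = { ...filed }; // copy so the stored object is actually replaced
    result.invItems = (filed['invItems'] + 1) || 1;
    result.hasOwnProperty(filed.invItems) ? result[filed.invItems + 1] = itemID : result[filed.invItems] = itemID;
    await frenzyDB.addKey(message.author.id + "_invcache", result); // persist the updated copy
    return result;
}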
Maybe this will help you:
const json = fs.readFileSync(`${__dirname}/data/data.json`, "utf-8");
const inputData = JSON.parse(json);
inputData.push({input: 'front'}) // creates a new element for data.json (assumes the file holds an array)
or, pushing several fields at once:
array.push({front: 'front', back: 'back'});

Delete a Document with all Subcollections and Nested Subcollections in Firestore

How can you delete a Document with all its collections and nested subcollections? (inside the functions environment)
In the RTDB you can call ref.child('.../someNode').setValue(null) and that achieves the desired behavior.
I can think of two ways you could achieve the desired delete behavior, both with tremendously ghastly drawbacks.
Create a 'Super' function that will spider every document and delete them in a batch.
This function would be complicated, brittle to changes, and might take a lengthy execution time.
Add 'onDelete' triggers for each Document type, and make it delete any direct subcollections. You'll call delete on the root document, and the deletion calls will propagate down the 'tree'. This is sluggish, scales atrociously and is costly due to the colossal load of function executions.
Imagine you would have to delete a 'GROUP' and all its children. It would be deeply chaotic with #1 and pricey with #2 (one function call per doc).
groups > GROUP > projects > PROJECT > files > FILE > assets > ASSET
> urls > URL
> members > MEMBER
> questions > QUESTION > answers > ANSWER > replies > REPLY
> comments > COMMENT
> resources > RESOURCE > submissions > SUBMISSION
> requests > REQUEST
Is there a superior/favored/cleaner way to delete a document and all its nested subcollections?
It ought to be possible considering you can do it from the console.
According to the Firebase documentation:
https://firebase.google.com/docs/firestore/solutions/delete-collections
deleting a collection with nested subcollections can be done easily and neatly with Node.js on the server side.
const client = require('firebase-tools');
await client.firestore.delete(collectionPath, {
    project: process.env.GCLOUD_PROJECT,
    recursive: true,
    yes: true
});
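In the linked docs this snippet runs inside a callable Cloud Function; roughly, the wrapper looks like this (a sketch based on that page - the fb.token config value is a CI token you set up yourself, e.g. via firebase login:ci):
// Sketch: callable function wrapping the recursive delete, per the linked docs
const functions = require('firebase-functions');
const firebase_tools = require('firebase-tools');
exports.recursiveDelete = functions
    .runWith({ timeoutSeconds: 540, memory: '2GB' })
    .https.onCall(async (data, context) => {
        const path = data.path;
        await firebase_tools.firestore.delete(path, {
            project: process.env.GCLOUD_PROJECT,
            recursive: true,
            yes: true,
            token: functions.config().fb.token
        });
        return { path: path };
    });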
Unfortunately, your analysis is spot on and this use case does require a lot of ceremony. According to the official documentation, there is no support for deep deletes in a single shot in Firestore, neither via the client libraries nor the REST API nor the CLI tool.
The CLI is open source and its implementation lives here: https://github.com/firebase/firebase-tools/blob/master/src/firestore/delete.js. They basically implemented option 1 from your question, so you can take some inspiration from there.
Both options 1 and 2 are far from ideal, and to make your solution 100% reliable you will need to keep a persistent queue of deletion tasks, as any error in the long-running procedure will leave your system in an ill-defined state.
I would discourage going with raw option 2, as recursive cloud function calls can very easily go wrong - for example, by hitting maximum limits.
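To illustrate what such a persistent queue could look like (a rough sketch, not an official pattern - the deletionQueue collection and the schedule are assumptions), you could enqueue paths to delete and drain them from a scheduled function, only dequeuing a task once its delete has succeeded:
// Sketch: drain a deletion queue from a scheduled Cloud Function
const functions = require('firebase-functions');
const admin = require('firebase-admin');
exports.drainDeletionQueue = functions.pubsub
    .schedule('every 5 minutes')
    .onRun(async () => {
        const db = admin.firestore();
        const tasks = await db.collection('deletionQueue').limit(10).get();
        for (const task of tasks.docs) {
            // recursiveDelete requires a recent Admin SDK (nodejs-firestore >= 4.11.0)
            await db.recursiveDelete(db.doc(task.data().path));
            await task.ref.delete(); // dequeue only after the delete succeeded
        }
    });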
In case the link changes, below is the full source of https://github.com/firebase/firebase-tools/blob/master/src/firestore/delete.js:
"use strict";
var clc = require("cli-color");
var ProgressBar = require("progress");
var api = require("../api");
var firestore = require("../gcp/firestore");
var FirebaseError = require("../error");
var logger = require("../logger");
var utils = require("../utils");
/**
* Construct a new Firestore delete operation.
*
* @constructor
* @param {string} project the Firestore project ID.
* @param {string} path path to a document or collection.
* @param {boolean} options.recursive true if the delete should be recursive.
* @param {boolean} options.shallow true if the delete should be shallow (non-recursive).
* @param {boolean} options.allCollections true if the delete should universally remove all collections and docs.
*/
function FirestoreDelete(project, path, options) {
this.project = project;
this.path = path;
this.recursive = Boolean(options.recursive);
this.shallow = Boolean(options.shallow);
this.allCollections = Boolean(options.allCollections);
// Remove any leading or trailing slashes from the path
if (this.path) {
this.path = this.path.replace(/(^\/+|\/+$)/g, "");
}
this.isDocumentPath = this._isDocumentPath(this.path);
this.isCollectionPath = this._isCollectionPath(this.path);
this.allDescendants = this.recursive;
this.parent = "projects/" + project + "/databases/(default)/documents";
// When --all-collections is passed any other flags or arguments are ignored
if (!options.allCollections) {
this._validateOptions();
}
}
/**
* Validate all options, throwing an exception for any fatal errors.
*/
FirestoreDelete.prototype._validateOptions = function() {
if (this.recursive && this.shallow) {
throw new FirebaseError("Cannot pass recursive and shallow options together.");
}
if (this.isCollectionPath && !this.recursive && !this.shallow) {
throw new FirebaseError("Must pass recursive or shallow option when deleting a collection.");
}
var pieces = this.path.split("/");
if (pieces.length === 0) {
throw new FirebaseError("Path length must be greater than zero.");
}
var hasEmptySegment = pieces.some(function(piece) {
return piece.length === 0;
});
if (hasEmptySegment) {
throw new FirebaseError("Path must not have any empty segments.");
}
};
/**
* Determine if a path points to a document.
*
* @param {string} path a path to a Firestore document or collection.
* @return {boolean} true if the path points to a document, false
* if it points to a collection.
*/
FirestoreDelete.prototype._isDocumentPath = function(path) {
if (!path) {
return false;
}
var pieces = path.split("/");
return pieces.length % 2 === 0;
};
/**
* Determine if a path points to a collection.
*
* @param {string} path a path to a Firestore document or collection.
* @return {boolean} true if the path points to a collection, false
* if it points to a document.
*/
FirestoreDelete.prototype._isCollectionPath = function(path) {
if (!path) {
return false;
}
return !this._isDocumentPath(path);
};
/**
* Construct a StructuredQuery to find descendant documents of a collection.
*
* See:
* https://firebase.google.com/docs/firestore/reference/rest/v1beta1/StructuredQuery
*
* @param {boolean} allDescendants true if subcollections should be included.
* @param {number} batchSize maximum number of documents to target (limit).
* @param {string=} startAfter document name to start after (optional).
* @return {object} a StructuredQuery.
*/
FirestoreDelete.prototype._collectionDescendantsQuery = function(
allDescendants,
batchSize,
startAfter
) {
var nullChar = String.fromCharCode(0);
var startAt = this.parent + "/" + this.path + "/" + nullChar;
var endAt = this.parent + "/" + this.path + nullChar + "/" + nullChar;
var where = {
compositeFilter: {
op: "AND",
filters: [
{
fieldFilter: {
field: {
fieldPath: "__name__",
},
op: "GREATER_THAN_OR_EQUAL",
value: {
referenceValue: startAt,
},
},
},
{
fieldFilter: {
field: {
fieldPath: "__name__",
},
op: "LESS_THAN",
value: {
referenceValue: endAt,
},
},
},
],
},
};
var query = {
structuredQuery: {
where: where,
limit: batchSize,
from: [
{
allDescendants: allDescendants,
},
],
select: {
fields: [{ fieldPath: "__name__" }],
},
orderBy: [{ field: { fieldPath: "__name__" } }],
},
};
if (startAfter) {
query.structuredQuery.startAt = {
values: [{ referenceValue: startAfter }],
before: false,
};
}
return query;
};
/**
* Construct a StructuredQuery to find descendant documents of a document.
* The document itself will not be included
* among the results.
*
* See:
* https://firebase.google.com/docs/firestore/reference/rest/v1beta1/StructuredQuery
*
* @param {boolean} allDescendants true if subcollections should be included.
* @param {number} batchSize maximum number of documents to target (limit).
* @param {string=} startAfter document name to start after (optional).
* @return {object} a StructuredQuery.
*/
FirestoreDelete.prototype._docDescendantsQuery = function(allDescendants, batchSize, startAfter) {
var query = {
structuredQuery: {
limit: batchSize,
from: [
{
allDescendants: allDescendants,
},
],
select: {
fields: [{ fieldPath: "__name__" }],
},
orderBy: [{ field: { fieldPath: "__name__" } }],
},
};
if (startAfter) {
query.structuredQuery.startAt = {
values: [{ referenceValue: startAfter }],
before: false,
};
}
return query;
};
/**
* Query for a batch of 'descendants' of a given path.
*
* For document format see:
* https://firebase.google.com/docs/firestore/reference/rest/v1beta1/Document
*
* @param {boolean} allDescendants true if subcollections should be included,
* @param {number} batchSize the maximum size of the batch.
* @param {string=} startAfter the name of the document to start after (optional).
* @return {Promise<object[]>} a promise for an array of documents.
*/
FirestoreDelete.prototype._getDescendantBatch = function(allDescendants, batchSize, startAfter) {
var url;
var body;
if (this.isDocumentPath) {
url = this.parent + "/" + this.path + ":runQuery";
body = this._docDescendantsQuery(allDescendants, batchSize, startAfter);
} else {
url = this.parent + ":runQuery";
body = this._collectionDescendantsQuery(allDescendants, batchSize, startAfter);
}
return api
.request("POST", "/v1beta1/" + url, {
auth: true,
data: body,
origin: api.firestoreOrigin,
})
.then(function(res) {
// Return the 'document' property for each element in the response,
// where it exists.
return res.body
.filter(function(x) {
return x.document;
})
.map(function(x) {
return x.document;
});
});
};
/**
* Progress bar shared by the class.
*/
FirestoreDelete.progressBar = new ProgressBar("Deleted :current docs (:rate docs/s)", {
total: Number.MAX_SAFE_INTEGER,
});
/**
* Repeatedly query for descendants of a path and delete them in batches
* until no documents remain.
*
* @return {Promise} a promise for the entire operation.
*/
FirestoreDelete.prototype._recursiveBatchDelete = function() {
var self = this;
// Tunable deletion parameters
var readBatchSize = 7500;
var deleteBatchSize = 250;
var maxPendingDeletes = 15;
var maxQueueSize = deleteBatchSize * maxPendingDeletes * 2;
// All temporary variables for the deletion queue.
var queue = [];
var numPendingDeletes = 0;
var pagesRemaining = true;
var pageIncoming = false;
var lastDocName;
var failures = [];
var retried = {};
var queueLoop = function() {
if (queue.length == 0 && numPendingDeletes == 0 && !pagesRemaining) {
return true;
}
if (failures.length > 0) {
logger.debug("Found " + failures.length + " failed deletes, failing.");
return true;
}
if (queue.length <= maxQueueSize && pagesRemaining && !pageIncoming) {
pageIncoming = true;
self
._getDescendantBatch(self.allDescendants, readBatchSize, lastDocName)
.then(function(docs) {
pageIncoming = false;
if (docs.length == 0) {
pagesRemaining = false;
return;
}
queue = queue.concat(docs);
lastDocName = docs[docs.length - 1].name;
})
.catch(function(e) {
logger.debug("Failed to fetch page after " + lastDocName, e);
pageIncoming = false;
});
}
if (numPendingDeletes > maxPendingDeletes) {
return false;
}
if (queue.length == 0) {
return false;
}
var toDelete = [];
var numToDelete = Math.min(deleteBatchSize, queue.length);
for (var i = 0; i < numToDelete; i++) {
toDelete.push(queue.shift());
}
numPendingDeletes++;
firestore
.deleteDocuments(self.project, toDelete)
.then(function(numDeleted) {
FirestoreDelete.progressBar.tick(numDeleted);
numPendingDeletes--;
})
.catch(function(e) {
// For server errors, retry if the document has not yet been retried.
if (e.status >= 500 && e.status < 600) {
logger.debug("Server error deleting doc batch", e);
// Retry each doc up to one time
toDelete.forEach(function(doc) {
if (retried[doc.name]) {
logger.debug("Failed to delete doc " + doc.name + " multiple times.");
failures.push(doc.name);
} else {
retried[doc.name] = true;
queue.push(doc);
}
});
} else {
logger.debug("Fatal error deleting docs ", e);
failures = failures.concat(toDelete);
}
numPendingDeletes--;
});
return false;
};
return new Promise(function(resolve, reject) {
var intervalId = setInterval(function() {
if (queueLoop()) {
clearInterval(intervalId);
if (failures.length == 0) {
resolve();
} else {
reject("Failed to delete documents " + failures);
}
}
}, 0);
});
};
/**
* Delete everything under a given path. If the path represents
* a document the document is deleted and then all descendants
* are deleted.
*
* @return {Promise} a promise for the entire operation.
*/
FirestoreDelete.prototype._deletePath = function() {
var self = this;
var initialDelete;
if (this.isDocumentPath) {
var doc = { name: this.parent + "/" + this.path };
initialDelete = firestore.deleteDocument(doc).catch(function(err) {
logger.debug("deletePath:initialDelete:error", err);
if (self.allDescendants) {
// On a recursive delete, we are insensitive to
// failures of the initial delete
return Promise.resolve();
}
// For a shallow delete, this error is fatal.
return utils.reject("Unable to delete " + clc.cyan(this.path));
});
} else {
initialDelete = Promise.resolve();
}
return initialDelete.then(function() {
return self._recursiveBatchDelete();
});
};
/**
* Delete an entire database by finding and deleting each collection.
*
* @return {Promise} a promise for all of the operations combined.
*/
FirestoreDelete.prototype.deleteDatabase = function() {
var self = this;
return firestore
.listCollectionIds(this.project)
.catch(function(err) {
logger.debug("deleteDatabase:listCollectionIds:error", err);
return utils.reject("Unable to list collection IDs");
})
.then(function(collectionIds) {
var promises = [];
logger.info("Deleting the following collections: " + clc.cyan(collectionIds.join(", ")));
for (var i = 0; i < collectionIds.length; i++) {
var collectionId = collectionIds[i];
var deleteOp = new FirestoreDelete(self.project, collectionId, {
recursive: true,
});
promises.push(deleteOp.execute());
}
return Promise.all(promises);
});
};
/**
* Check if a path has any children. Useful for determining
* if deleting a path will affect more than one document.
*
* @return {Promise<boolean>} a promise that returns true if the path has
* children and false otherwise.
*/
FirestoreDelete.prototype.checkHasChildren = function() {
return this._getDescendantBatch(true, 1).then(function(docs) {
return docs.length > 0;
});
};
/**
* Run the delete operation.
*/
FirestoreDelete.prototype.execute = function() {
var verifyRecurseSafe;
if (this.isDocumentPath && !this.recursive && !this.shallow) {
verifyRecurseSafe = this.checkHasChildren().then(function(multiple) {
if (multiple) {
return utils.reject("Document has children, must specify -r or --shallow.", { exit: 1 });
}
});
} else {
verifyRecurseSafe = Promise.resolve();
}
var self = this;
return verifyRecurseSafe.then(function() {
return self._deletePath();
});
};
module.exports = FirestoreDelete;
For those who don't want to or can't use cloud functions, I found a recursiveDelete function in the Admin SDK:
https://googleapis.dev/nodejs/firestore/latest/Firestore.html#recursiveDelete
// Recursively delete a reference and log the references of failures.
const MAX_RETRY_ATTEMPTS = 3; // not part of the API - pick a retry cap that suits you
const bulkWriter = firestore.bulkWriter();
bulkWriter.onWriteError((error) => {
    if (error.failedAttempts < MAX_RETRY_ATTEMPTS) {
        return true; // retry this write
    } else {
        console.log('Failed write at document: ', error.documentRef.path);
        return false; // give up on this document
    }
});
await firestore.recursiveDelete(docRef, bulkWriter);
I don't know how helpful this will be for you, but test it and compare the execution time; I took it from the Firestore docs:
/** Delete a collection in batches to avoid out-of-memory errors.
 * Batch size may be tuned based on document size (at most 1 MB) and application requirements.
 */
void deleteCollection(CollectionReference collection, int batchSize) {
    try {
        // retrieve a small batch of documents to avoid out-of-memory errors
        ApiFuture<QuerySnapshot> future = collection.limit(batchSize).get();
        int deleted = 0;
        // future.get() blocks on document retrieval
        List<QueryDocumentSnapshot> documents = future.get().getDocuments();
        for (QueryDocumentSnapshot document : documents) {
            document.getReference().delete();
            ++deleted;
        }
        if (deleted >= batchSize) {
            // retrieve and delete another batch
            deleteCollection(collection, batchSize);
        }
    } catch (Exception e) {
        System.err.println("Error deleting collection : " + e.getMessage());
    }
}
As mentioned above, you need to write a good bit of code for this. For each document that is to be deleted you need to check whether it has one or more collections. If it does, then you need to queue those up for deletion too. I wrote the code below to do this. It's not tested to be scalable to large data sets, which is fine for me as I'm using it to clean up after small-scale integration tests. If you need something more scalable, feel free to take this as a starting point and play around with batching more.
class FirebaseDeleter {
constructor(database, collections) {
this._database = database;
this._pendingCollections = [];
}
run() {
return new Promise((resolve, reject) => {
this._callback = resolve;
this._database.getCollections().then(collections => {
this._pendingCollections = collections;
this._processNext();
});
});
}
_processNext() {
const collections = this._pendingCollections;
this._pendingCollections = [];
const promises = collections.map(collection => {
return this.deleteCollection(collection, 10000);
});
Promise.all(promises).then(() => {
if (this._pendingCollections.length == 0) {
this._callback();
} else {
process.nextTick(() => {
this._processNext();
});
}
});
}
deleteCollection(collectionRef, batchSize) {
var query = collectionRef;
return new Promise((resolve, reject) => {
this.deleteQueryBatch(query, batchSize, resolve, reject);
});
}
deleteQueryBatch(query, batchSize, resolve, reject) {
query
.get()
.then(snapshot => {
// When there are no documents left, we are done
if (snapshot.size == 0) {
return 0;
}
// Delete documents in a batch
var batch = this._database.batch();
const collectionPromises = [];
snapshot.docs.forEach(doc => {
collectionPromises.push(
doc.ref.getCollections().then(collections => {
collections.forEach(collection => {
this._pendingCollections.push(collection);
});
})
);
batch.delete(doc.ref);
});
// Wait until we know if all the documents have collections before deleting them.
return Promise.all(collectionPromises).then(() => {
return batch.commit().then(() => {
return snapshot.size;
});
});
})
.then(numDeleted => {
if (numDeleted === 0) {
resolve();
return;
}
// Recurse on the next process tick, to avoid
// exploding the stack.
process.nextTick(() => {
this.deleteQueryBatch(query, batchSize, resolve, reject);
});
})
.catch(reject);
}
}
Solution using Node.js Admin SDK
export const deleteDocument = async (doc: FirebaseFirestore.DocumentReference) => {
    const collections = await doc.listCollections()
    await Promise.all(collections.map(collection => deleteCollection(collection)))
    await doc.delete()
}

export const deleteCollection = async (collection: FirebaseFirestore.CollectionReference) => {
    const query = collection.limit(100)
    while (true) {
        const snap = await query.get()
        if (snap.empty) {
            return
        }
        await Promise.all(snap.docs.map(doc => deleteDocument(doc.ref)))
    }
}
There is now a simple way to delete a document and all of its subcollections using Node.js.
This was made available in nodejs-firestore version v4.11.0.
From the docs:
recursiveDelete()
Recursively deletes all documents and subcollections at and under the specified level.
import * as admin from 'firebase-admin'

const ref = admin.firestore().doc('my_document')
admin.firestore().recursiveDelete(ref) // returns a Promise you can await
You can write a handler which will recursively delete all nested descendants when the onDelete Firestore event triggers.
Example of handler:
const deleteDocumentWithDescendants = async (documentSnap: FirebaseFirestore.QueryDocumentSnapshot) => {
    return documentSnap.ref.listCollections().then((subCollections) => {
        subCollections.forEach((subCollection) => {
            return subCollection.get().then((snap) => {
                snap.forEach((doc) => {
                    doc.ref.delete();
                    deleteDocumentWithDescendants(doc);
                });
            });
        });
    });
};

// On any document delete
export const onDocumentDelete = async (documentSnap: FirebaseFirestore.QueryDocumentSnapshot) => {
    await deleteDocumentWithDescendants(documentSnap);
};
Tie it up with firestore event:
exports.onDeleteDocument = functions.firestore.document('{collectionId}/{docId}')
.onDelete(onDocumentDelete);
// You can add all the collection hierarchy to object
private collectionsHierarchy = {
groups: [
[
'groups',
'projects',
'files',
'assets',
'urls',
'members'
]
]
};
async deleteDocument(rootDocument: string) {
// if (!rootDocument.startsWith(`groups/${this.groupId()}`)) {
// rootDocument = `groups/${this.groupId()}/${rootDocument}`;
// }
const batchSize: number = 100;
let root = await this.db
.doc(rootDocument)
.get()
.toPromise();
if (!root.exists) {
return;
}
const segments = rootDocument.split('/');
const documentCollection = segments[segments.length - 2];
const allHierarchies = this.collectionsHierarchy[documentCollection];
for (let i = 0; i < allHierarchies.length; i = i + 1) {
const hierarchy = allHierarchies[i];
const collectionIndex = hierarchy.indexOf(documentCollection) + 1;
const nextCollections: [] = hierarchy.slice(collectionIndex);
const stack = [`${root.ref.path}/${nextCollections.shift()}`];
while (stack.length) {
const path = stack.pop();
const collectionRef = this.db.firestore.collection(path);
const query = collectionRef.orderBy('__name__').limit(batchSize);
let deletedItems = await this.deleteQueryBatch(query, batchSize);
const nextCollection = nextCollections.shift();
deletedItems = deletedItems.map(di => `${di}/${nextCollection}`);
stack.push(...deletedItems);
}
}
await root.ref.delete();
}
private async deleteQueryBatch(
query: firebase.firestore.Query,
batchSize: number
) {
let deletedItems: string[] = [];
let snapshot = await query.get();
if (snapshot.size === 0) {
return deletedItems;
}
const batch = this.db.firestore.batch();
snapshot.docs.forEach(doc => {
deletedItems.push(doc.ref.path);
batch.delete(doc.ref);
});
await batch.commit();
if (snapshot.size === 0) {
return deletedItems;
}
const result = await this.deleteQueryBatch(query, batchSize);
return [...deletedItems, ...result];
}
Another solution, using the Node.js Admin SDK with batched writes.
const traverseDocumentRecursively = async (
docRef: FirebaseFirestore.DocumentReference<FirebaseFirestore.DocumentData>,
accumulatedRefs: FirebaseFirestore.DocumentReference<FirebaseFirestore.DocumentData>[],
) => {
const collections = await docRef.listCollections();
if (collections.length > 0) {
for (const collection of collections) {
const snapshot = await collection.get();
for (const doc of snapshot.docs) {
accumulatedRefs.push(doc.ref);
await traverseDocumentRecursively(doc.ref, accumulatedRefs);
}
}
}
};
import { chunk } from 'lodash';
const doc = admin.firestore().collection('users').doc('001');
const accumulatedRefs: FirebaseFirestore.DocumentReference<FirebaseFirestore.DocumentData>[] = [];
await traverseDocumentRecursively(doc, accumulatedRefs);
await Promise.all(
// Each transaction or batch of writes can write to a maximum of 500 documents
chunk(accumulatedRefs, 500).map((chunkedRefs) => {
const batch = admin.firestore().batch();
for (const ref of chunkedRefs) {
batch.delete(ref);
}
return batch.commit();
}),
);
Not sure if this is helpful for anyone here, but I am frequently facing the error "Fatal error deleting docs <list of docs>" when using the firebase-tools.firestore.delete method (firebase-tools version 9.22.0).
I am currently handling these deletion failures using the returned error message, in order to avoid rewriting the code cited in Oleg Bondarenko's answer. It uses admin.firestore to effectively delete the failed docs.
It's a poor solution since it relies on the error message, but at least it doesn't force us to copy the whole FirestoreDelete code just to modify a few lines of it:
firebase_tools.firestore
.delete(path, {
project: JSON.parse(process.env.FIREBASE_CONFIG!).projectId,
recursive: true,
yes: true,
token: getToken(),
})
.catch((err: Error) => {
if (err.name == "FirebaseError") {
// If recursive delete fails to delete some of the documents,
// parse the failures from the error message and delete it manually
const failedDeletingDocs = err.message.match(
/.*Fatal error deleting docs ([^\.]+)/
);
if (failedDeletingDocs) {
const docs = failedDeletingDocs[1].split(", ");
const docRefs = docs.map((doc) =>
firestore.doc(doc.slice(doc.search(/\(default\)\/documents/) + 19))
);
firestore
.runTransaction(async (t) => {
docRefs.forEach((doc) => t.delete(doc));
return docs;
})
.then((docs) =>
console.log(
"Succesfully deleted docs after failing: " + docs.join(", ")
)
)
.catch((err) => console.error(err));
}
}
});
If you are looking to delete user data, a solution to consider in 2022 is the Delete User Data Firebase Extension.
Once this is active, you can simply delete the user from Firebase Auth to trigger the recursive deletion of the user documents:
import admin from "firebase-admin";
admin.auth().deleteUser(userId);
You can call firebase.firestore().doc("whatever").set({}) and that will erase every existing field in that document (note that subcollections under it are not affected).
The only way .set does not erase the existing fields is if you set the merge flag to true.
See the Firestore documentation on Add Data:
var cityRef = db.collection('cities').doc('BJ');
var setWithMerge = cityRef.set({
    capital: true
}, { merge: true });
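To make the distinction concrete, a small sketch using the same cities example:
var doc = db.collection('cities').doc('BJ');
doc.set({}).then(function() {
    // the document still exists here, but all of its fields are gone;
    // any subcollections under it are untouched by set()
    return doc.delete(); // this removes the document itself
});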

RecordRTC upload video to node js server

I am using RecordRTC to record WebRTC meetings. After implementing recording, when I test the application with both clients on the same system it works fine; when I test it with clients on different systems the meeting is not recorded.
Here is my client-side code for stopping the recording.
recordRTC.stopRecording(function (videoURL) {
console.log('recordRTC.stopRecording Function inside');
SelectedFile = recordRTC.getBlob();
$('#uploadForm').append('#attachmentFileId', recordRTC.getBlob());
StartUpload();
});
var FReader;
var Name = "Meeting" + "_" + Date.now() + ".webm";
function StartUpload()
{
FReader = new FileReader();
FReader.onload = function (evnt)
{
socket.emit('Upload', { 'Name': Name, Data: evnt.target.result });
}
socket.emit('Start', { 'Name': Name, 'Size': SelectedFile.size });
}
socket.on('MoreData', function (data)
{
var Place = data['Place'] * 524288; //The Next Blocks Starting Position
var NewFile; //The Variable that will hold the new Block of Data
if (SelectedFile.webkitSlice)
NewFile = SelectedFile.webkitSlice(Place, Place + Math.min(524288, (SelectedFile.size - Place)));
else
NewFile = SelectedFile.slice(Place, Place + Math.min(524288, (SelectedFile.size - Place)));
FReader.readAsBinaryString(NewFile);
});
Server-side code (I got this from here):
socket.on('Start', function (data) { //data contains the variables that we passed through in the html file
var Name = data['Name'];
Files[Name] = { //Create a new Entry in The Files Variable
FileSize : data['Size'],
Data : "",
Downloaded : 0
}
var Place = 0;
try{
var Stat = fs.statSync('Temp/' + Name);
if(Stat.isFile())
{
Files[Name]['Downloaded'] = Stat.size;
Place = Stat.size / 524288;
}
}
catch(er){} //It's a New File
fs.open("Temp/" + Name, 'a', 0755, function(err, fd){
if(err)
{
console.log(err);
}
else
{
Files[Name]['Handler'] = fd; //We store the file handler so we can write to it later
socket.emit('MoreData', { 'Place' : Place, Percent : 0 });
}
});
});
socket.on('Upload', function (data){
var Name = data['Name'];
Files[Name]['Downloaded'] += data['Data'].length;
Files[Name]['Data'] += data['Data'];
if(Files[Name]['Downloaded'] == Files[Name]['FileSize']) //If File is Fully Uploaded
{
fs.write(Files[Name]['Handler'], Files[Name]['Data'], null, 'Binary', function(err, Writen){
var input = fs.createReadStream("Temp/" + Name);
var output = fs.createWriteStream("Video/" + Name);
//util.pump(readableStream, writableStream, [callback])
//Deprecated: Use readableStream.pipe(writableStream)
input.pipe(output);
input.on("end", function() {
console.log("end");
fs.unlink("Temp/" + Name, function ()
{ //This Deletes The Temporary File
console.log("unlink this file:",Name );
//socket.emit('Done', {'Image' : 'Video/' + Name + '.jpg'});
});
});
});
}
else if(Files[Name]['Data'].length > 10485760){ //If the Data Buffer reaches 10MB
fs.write(Files[Name]['Handler'], Files[Name]['Data'], null, 'Binary', function(err, Writen){
Files[Name]['Data'] = ""; //Reset The Buffer
var Place = Files[Name]['Downloaded'] / 524288;
var Percent = (Files[Name]['Downloaded'] / Files[Name]['FileSize']) * 100;
socket.emit('MoreData', { 'Place' : Place, 'Percent' : Percent});
});
}
else
{
var Place = Files[Name]['Downloaded'] / 524288;
var Percent = (Files[Name]['Downloaded'] / Files[Name]['FileSize']) * 100;
socket.emit('MoreData', { 'Place' : Place, 'Percent' : Percent});
}
});
If both clients are on the same machine/system it works fine, but if the clients are on different systems the meeting is not recorded.

How to promisify MongoDB/Mongoose .findOne before .push, in an array.forEach?

I have looked through the Bluebird README examples, and am still wondering how to convert some async code to use promises with .then.
There are a number of if statements in here, though the main point is that while looping through toArray, if the element exists in the database (findOne) it should be assigned to a variable so it can be .pushed into a field of an embedded doc in the new (.post & .save) db doc.
Here's the current async code, which consequently runs the findOne after .save, but it needs to run before:
// create a story (accessed at POST http://localhost:4200/api/v1/story)
.post(function(req, res) {
console.log('posting a new Story..from: ' + res.locals._id + '..' + res.locals.username );
var story = new Models.Story();
var toArray = req.body.to;
console.log(toArray); // [ 'user1', 'user2', 'user3' ]
toArray.forEach(toArrayLoop);
function toArrayLoop(element, index, array){
console.log('element: ' + element); // 'user1' .. 'user2' .. 'user3'
var out = false; // if sent to Self, out = true
if (element == res.locals.username) {out = true; console.log('to element: ' + element + ' == res.locals.username: ' + res.locals.username)}
var toUserId = '';
if (element) {
Models.User.findOne({username: element}, function (err, user) {
if (user) {
if (err) {
console.log(err);
res.send(err);
}
console.log('user._id = ' + user._id);
toUserId = user._id;
} else {
toUserId = '';
console.log('toUserId = ' + toUserId);
}
});
}
story.to.push({
user : toUserId, // push the findOne user._id
username : element, // push the toArray element
view :
{
inbox: true,
outbox: out,
archive: false,
},
updated : req.body.nowDatetime
});
}
var archive = false;
console.log('req.body.archive = ' + req.body.archive);
if (req.body.archive == 'true') { archive = true; console.log('archive = ' + archive); };
var toSelf = false; // renamed: 'in' is a reserved word in JavaScript
toArray.forEach(fromSelfLoop);
function fromSelfLoop(element, index, array){
console.log('checking if sent to Self: ' + element); // 'user1' .. if matches res.locals: (sent from/to Self)
if (element == res.locals.username) {toSelf = true; console.log('from element: ' + element + ' == res.locals.username: ' + res.locals.username)}
} // if sent to Self, archive = true
story.from.push({
user : res.locals._id,
username : res.locals.username,
view :
{
inbox: toSelf,
outbox: true,
archive: archive,
},
updated : req.body.nowDatetime
});
story.title = req.body.title;
// ..even more doc val assignments..
console.log('To: ' + req.body.to);
console.log('Story: ' + req.body.title);
story.save(function(err, result) {
if (err) {
console.log(err);
res.send(err);
}
console.log("The result: ", result);
res.json({ message: 'Story "' + story.title + '" Created' });
});
console.log('post success!');
})
You're way overkilling it in my opinion - promises provide ways to synchronize this out of the box, seamlessly.
You can use promise aggregation methods (in this case .join and .props) to map directly to the properties and get the values.
Assuming you promisified Mongoose (so Bluebird promises rather than Mongoose ones).
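If you have not promisified it yet, the usual Bluebird incantation (this is what provides the findOneAsync used below; a one-time setup sketch) is:
var Promise = require("bluebird");
// adds an ...Async variant of every callback-style method,
// e.g. Model.findOne -> Model.findOneAsync
Promise.promisifyAll(require("mongoose"));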
var story = new Models.Story();
var toArray = req.body.to; // [ 'user1', 'user2', 'user3' ]

var to = Promise.map(toArray, function(element) {
    return Promise.props({ // resolves all properties
        user: Models.User.findOneAsync({ username: element }),
        username: element, // push the toArray element
        view: {
            inbox: true,
            outbox: element == res.locals.user.username,
            archive: false
        },
        updated: req.body.nowDatetime
    });
});

var from = Promise.map(toArray, function(element) { // can be a normal map
    return Promise.props({
        user: res.locals._id,
        username: res.locals.username,
        view: {
            inbox: element == res.locals.user.username,
            outbox: true,
            archive: archive,
        },
        updated: req.body.nowDatetime
    });
});

Promise.join(to, from, function(to, from) {
    story.to = to;
    story.from = from;
    story.title = req.body.title;
    return story.save();
}).then(function() {
    console.log("Success! Story saved!");
}).catch(Promise.OperationalError, function(e) {
    // handle error in Mongoose save, findOne etc., res.send(...)
}).catch(function(e) {
    // handle other exceptions here; this is most likely
    // a 500 error where the top one is a 4XX, but pay close
    // attention to how you handle errors here
});

Is it possible to build a dynamic task list in nodejs Async (waterfall, series, etc...)

I am pulling information from some collections in Mongo that contain node and edge data. First I must get the node so I can grab its edges. Once I have a list of edges, I then go back out and grab more nodes (etc., based on a depth value). The following code is a loose example of how I am attempting to use async.waterfall and the task list.
Initially I have only a single task, but once I make my first query I add to the task array. Unfortunately this does not seem to register with async, and it does not continue to process the tasks I am adding.
Is there a better way to do this?
var async = require('async')
var mongoose = require('mongoose')
var _ = require('underscore')
var Client = this.Mongo.connect(/*path to mongo*/)
var Node = mongoose.Schema({
id : String,
graph_id : String
})
var Edge = mongoose.Schema({
id : String,
source_id : String,
destination_id : String
})
var Nodes = Client.model('myNode', Node)
var Edges = Client.model('myEdge', Edge)
var funcs = []
var round = 1
var depth = 2
var query = {
node : {
id : '12345'
},
edge : {
id : '12345'
}
}
var addTask = function(Nodes, Edges, query, round, depth) {
return function(callback) {
queryData(Nodes, Edges, query, function(err, node_list) {
if(depth > round) {
round++
function_array.push(addTask(Nodes, Edges, query, round, depth))
}
})
}
}
var queryData = function(Nodes, Edges, query, cb) {
async.waterfall([
function(callback) {
Nodes.find(query.node, function(err, nodes) {
var node_keys = _.map(nodes, function(node) {
return node.id
})
callback(null, nodes, node_keys)
})
},
function(nodes, node_keys, callback) {
query.edge.$or = [ {'source_id' : {$in:node_keys}}, {'destination_id' : {$in:node_keys}} ]
Edges.find(query.edge, function(err, edges) {
var edge_keys = _.map(edges, function(edge) {
if(edge['_doc']['source_id'] != query.node.id) {
return edge['_doc']['source_id']
} else {
return edge['_doc']['destination_id']
}
callback(null, nodes, edges, node_keys, edge_keys)
})
})
}
], function(err, nodes, edges, node_keys, edge_keys) {
// update the results object then...
cb(null, _.uniq(edge_keys)
})
}
var function_array = []
function_array.push(addTask(Nodes, Edges, query, round, depth))
async.waterfall(function_array, function(err) {
Client.disconnect()
//this should have run more than just the initial task but does not
})
--------------------- UPDATE ---------------------------
So after playing around with trying to get async.waterfall or async.series to do this by adding trailing functions, I decided to switch to async.whilst and am now happy with the solution.
function GraphObject() {
    this.function_array = [];
}

GraphObject.prototype.doStuff = function() {
    this.function_array.push(this.buildFunction(100));
    this.runTasks(function(err) {
        console.log('done with all tasks');
    });
};

GraphObject.prototype.buildFunction = function(times) {
    return function(cb) {
        if (times != 0) {
            this.function_array.push(this.buildFunction(times - 1));
        }
        cb(null);
    };
};

GraphObject.prototype.runTasks = function(cb) {
    var tasks_run = 0;
    async.whilst(
        function() {
            return this.function_array.length > 0;
        }.bind(this),
        function(callback) {
            var func = this.function_array.shift();
            func.call(this, function(err) {
                tasks_run++;
                callback(err);
            });
        }.bind(this),
        function(err) {
            console.log('runTasks ran ' + tasks_run + ' tasks');
            if (err) {
                return cb(500);
            }
            cb(null);
        }.bind(this)
    );
};
A task in your function_array can only add a new task to the array provided it is NOT the last task in the array.
In your case, your function_array contained only 1 task. That task itself cannot add additional tasks since it's the last task.
The solution is to have 2 tasks in the array. A startTask to bootstrap the process, and a finalTask that is more of a dummy task. In that case,
function_array = [startTask, finalTask];
Then startTask would add taskA, taskA would add taskB, taskB would add taskC, and eventually
function_array = [startTask, taskA, taskB, taskC, finalTask];
The sample code below illustrates the concept.
var async = require('async');

var max = 6;

var nodeTask = function(taskId, value, callback) {
    var r = Math.floor(Math.random() * 20) + 1;
    console.log("From Node Task %d: %d", taskId, r);
    // add an edge task just before the final task
    if (taskId < max) {
        function_array.splice(function_array.length - 1, 0, edgeTask);
    }
    callback(null, taskId + 1, value + r);
};

var edgeTask = function(taskId, value, callback) {
    var r = Math.floor(Math.random() * 20) + 1;
    console.log("From Edge Task %d: %d", taskId, r);
    // add a node task just before the final task
    if (taskId < max) {
        function_array.splice(function_array.length - 1, 0, nodeTask);
    }
    callback(null, taskId + 1, value + r);
};

var startTask = function(callback) {
    function_array.splice(function_array.length - 1, 0, nodeTask);
    callback(null, 1, 0);
};

var finalTask = function(taskId, value, callback) {
    callback(null, value);
};

var function_array = [startTask, finalTask];

async.waterfall(function_array, function(err, result) {
    console.log("Sum is ", result);
});